Mirror of https://github.com/mwisnowski/mtg_python_deckbuilder.git (synced 2025-12-16 15:40:12 +01:00)
feat(random): multi-theme groundwork, locked reroll export parity, duplicate export fix, expanded diagnostics and test coverage
This commit is contained in: parent a029d430c5, commit 73685f22c8
39 changed files with 2671 additions and 271 deletions
.env.example (44 changed lines)
@@ -13,7 +13,7 @@
# HOST=0.0.0.0 # Uvicorn bind host (only when APP_MODE=web).
# PORT=8080 # Uvicorn port.
# WORKERS=1 # Uvicorn worker count.
APP_VERSION=v2.2.9 # Matches dockerhub compose.
APP_VERSION=v2.2.10 # Matches dockerhub compose.

############################
# Theming

@@ -27,6 +27,8 @@ THEME=system # system|light|dark (initial default; user p
# DECK_EXPORTS=/app/deck_files # Where finished deck exports are read by Web UI.
# OWNED_CARDS_DIR=/app/owned_cards # Preferred directory for owned inventory uploads.
# CARD_LIBRARY_DIR=/app/owned_cards # Back-compat alias for OWNED_CARDS_DIR.
# CSV_FILES_DIR=/app/csv_files # Override CSV base dir (use test snapshots or alternate datasets)
# CARD_INDEX_EXTRA_CSV= # Inject an extra CSV into the card index for testing

############################
# Web UI Feature Flags

@@ -41,6 +43,14 @@ WEB_VIRTUALIZE=1 # dockerhub: WEB_VIRTUALIZE="1"
ALLOW_MUST_HAVES=1 # dockerhub: ALLOW_MUST_HAVES="1"
WEB_THEME_PICKER_DIAGNOSTICS=0 # 1=enable uncapped synergies, diagnostics fields & /themes/metrics (dev only)

############################
# Random Modes (alpha)
############################
# RANDOM_MODES=1 # Enable backend random build endpoints
# RANDOM_UI=1 # Show Surprise/Reroll/Share controls in UI
# RANDOM_MAX_ATTEMPTS=5 # Cap retry attempts for constrained random builds
# RANDOM_TIMEOUT_MS=5000 # Per-attempt timeout (ms)

############################
# Automation & Performance (Web)
############################

@@ -50,6 +60,8 @@ WEB_TAG_PARALLEL=1 # dockerhub: WEB_TAG_PARALLEL="1"
WEB_TAG_WORKERS=2 # dockerhub: WEB_TAG_WORKERS="4"
WEB_AUTO_ENFORCE=0 # dockerhub: WEB_AUTO_ENFORCE="0"
# WEB_CUSTOM_EXPORT_BASE= # Custom basename for exports (optional).
# THEME_CATALOG_YAML_SCAN_INTERVAL_SEC=2.0 # Poll for YAML changes (dev)
# WEB_THEME_FILTER_PREWARM=0 # 1=prewarm common filters for faster first renders

############################
# Headless Export Options

@@ -116,11 +128,41 @@ DEBIAN_FRONTEND=noninteractive # Suppress apt UI in Docker builds.
# EDITORIAL_MIN_EXAMPLES=0 # (Future) minimum curated examples (cards/commanders) target.
# EDITORIAL_MIN_EXAMPLES_ENFORCE=0 # (Future) enforce vs warn.

############################
# Sampling & Rarity Tuning (advanced)
############################
# SPLASH_ADAPTIVE=0 # 1=enable adaptive off-color penalty
# SPLASH_ADAPTIVE_SCALE=1:1.0,2:1.0,3:1.0,4:0.6,5:0.35
# RARITY_W_MYTHIC=1.2
# RARITY_W_RARE=0.9
# RARITY_W_UNCOMMON=0.65
# RARITY_W_COMMON=0.4
# RARITY_DIVERSITY_TARGETS=mythic:0-1,rare:0-2,uncommon:0-4,common:0-6
# RARITY_DIVERSITY_OVER_PENALTY=-0.5

############################
# Theme Preview Cache & Redis (optional)
############################
# THEME_PREVIEW_CACHE_MAX=400 # Max previews cached in memory
# WEB_THEME_PREVIEW_LOG=0 # 1=verbose cache logs
# THEME_PREVIEW_ADAPTIVE=0 # 1=adaptive cache policy
# THEME_PREVIEW_EVICT_COST_THRESHOLDS=5,15,40
# THEME_PREVIEW_BG_REFRESH=0 # 1=background refresh worker
# THEME_PREVIEW_BG_REFRESH_INTERVAL=120 # seconds
# THEME_PREVIEW_TTL_BASE=300
# THEME_PREVIEW_TTL_MIN=60
# THEME_PREVIEW_TTL_MAX=900
# THEME_PREVIEW_TTL_BANDS=0.2,0.5,0.8
# THEME_PREVIEW_TTL_STEPS=2,4,2,3,1
# THEME_PREVIEW_REDIS_URL=redis://localhost:6379/0
# THEME_PREVIEW_REDIS_DISABLE=0 # 1=disable redis even if URL set


######################################################################
# Notes
# - CLI arguments override env vars; env overrides JSON config; JSON overrides defaults.
# - For include/exclude card functionality enable ALLOW_MUST_HAVES=1 (Web) and use UI or CLI flags.
# - For Random Modes UI, set RANDOM_MODES=1 and RANDOM_UI=1; see /random.
# - Path overrides must point to mounted volumes inside the container.
# - Remove a value or leave it commented to fall back to internal defaults.
######################################################################

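To make the precedence note above concrete, here is a minimal resolution sketch; the option name, file path, and defaults are placeholders rather than keys used by this project:

import json
import os

DEFAULTS = {"example_option": "internal-default"}   # placeholder internal defaults

def resolve(option: str, cli_value=None, json_path: str = "config.json"):
    # CLI arguments override env vars; env overrides JSON config; JSON overrides defaults.
    if cli_value is not None:
        return cli_value
    env_value = os.getenv(option.upper())
    if env_value is not None:
        return env_value
    try:
        with open(json_path, encoding="utf-8") as fh:
            cfg = json.load(fh)
    except OSError:
        cfg = {}
    return cfg.get(option, DEFAULTS.get(option))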
CHANGELOG.md (13 changed lines)
@@ -14,6 +14,14 @@ This format follows Keep a Changelog principles and aims for Semantic Versioning

## [Unreleased]
### Added
- Random Mode multi-theme groundwork: the backend now supports `primary_theme`, `secondary_theme`, and `tertiary_theme` with a deterministic AND-combination cascade (P+S+T → P+S → P+T → P → synergy overlap → full pool). Diagnostics fields (`resolved_themes`, `combo_fallback`, `synergy_fallback`, `fallback_reason`) added to `RandomBuildResult` (UI wiring pending).
- Locked commander reroll path now produces the full artifact set (CSV, TXT, compliance JSON, summary JSON), identical to Surprise builds.
- Random reroll tests covering commander lock invariance, artifact presence, duplicate export prevention, and form vs JSON submission.
- Roadmap document `logs/roadmaps/random_multi_theme_roadmap.md` capturing design, fallback strategy, diagnostics, and the incremental delivery plan.
- Random Modes diagnostics: surfaced `attempts`, `timeout_hit`, and `retries_exhausted` in API responses and the HTMX result fragment (gated by SHOW_DIAGNOSTICS); added tests covering the retries-exhausted and timeout paths and enabled friendly labels in the UI.
- Random Full Build export parity: random full deck builds now produce the standard artifact set — `<stem>.csv`, `<stem>.txt`, `<stem>_compliance.json` (bracket policy report), and `<stem>.summary.json` (summary with `meta.random` seed/theme/constraints). The random full build API response now includes `csv_path`, `txt_path`, and `compliance` keys (paths) for immediate consumption (see the sketch after this list).
- Opt-out environment toggle `RANDOM_BUILD_SUPPRESS_INITIAL_EXPORT` (suppression is active by default); set `RANDOM_BUILD_SUPPRESS_INITIAL_EXPORT=0` to restore the legacy double-export behavior for debugging.
- Tests: added a random full build export test ensuring exactly one CSV/TXT pair (no `_1` duplicates) plus sidecar JSON artifacts.
- Taxonomy snapshot CLI (`code/scripts/snapshot_taxonomy.py`): writes an auditable JSON snapshot of BRACKET_DEFINITIONS to `logs/taxonomy_snapshots/` with a deterministic SHA-256 hash; skips duplicates unless forced.
- Optional adaptive splash penalty (feature flag): enable with `SPLASH_ADAPTIVE=1`; tuning via `SPLASH_ADAPTIVE_SCALE` (default `1:1.0,2:1.0,3:1.0,4:0.6,5:0.35`).
- Splash penalty analytics: counters now include total off-color cards and penalty reason events; structured logs include event details to support tuning.

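As a usage sketch of the export parity described above (function and field names are taken from this commit; the theme and seed values are arbitrary):

from deck_builder.random_entrypoint import build_random_full_deck

res = build_random_full_deck(theme="Tokens", seed=4242)  # deterministic for a fixed seed/theme
print(res.csv_path)     # <stem>.csv written by the orchestrated export path
print(res.txt_path)     # <stem>.txt alongside it
print(res.compliance)   # bracket policy report (dict) when available
# <stem>.summary.json is written next to the CSV with the random seed/theme/constraints in its meta block.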
@@ -39,7 +47,10 @@ This format follows Keep a Changelog principles and aims for Semantic Versioning
- Optional multi-pass performance CI variant (`preview_perf_ci_check.py --multi-pass`) to collect cold vs warm pass stats when diagnosing divergence.

### Changed
- Random reroll (locked commander) export flow: now reuses builder-exported artifacts when present and records `last_csv_path` / `last_txt_path` inside the headless runner to avoid duplicate suffixed files (see the sketch after this list).
- Summary sidecars for random builds include a `locked_commander` flag when rerolling the same commander.
- Splash analytics recognize both static and adaptive penalty reasons (shared prefix handling), so existing dashboards continue to work when `SPLASH_ADAPTIVE=1`.
- Random full builds now internally force `RANDOM_BUILD_SUPPRESS_INITIAL_EXPORT=1` (if unset), ensuring only the orchestrated export path executes and eliminating the historical duplicate `*_1.csv` / `*_1.txt` files. Set `RANDOM_BUILD_SUPPRESS_INITIAL_EXPORT=0` to intentionally restore the legacy double-export (not recommended outside debugging).
- Picker list & API use the optimized fast filtering path (`filter_slugs_fast`), replacing per-request linear scans.
- Preview sampling: curated examples pinned first, diversity quotas (~40% payoff / 40% enabler+support / 20% wildcard), synthetic placeholders only if underfilled.
- Sampling refinements: rarity diminishing weight, splash leniency (a single off-color allowance with penalty for 4–5 color commanders), role saturation penalty, and a refined commander overlap scaling curve.

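A condensed sketch of the artifact-reuse rule above, assuming a DeckBuilder instance `builder` from a completed headless run (attribute and method names are from this commit; the control flow is simplified):

import os

csv_path = getattr(builder, "last_csv_path", None)
if not (isinstance(csv_path, str) and os.path.isfile(csv_path)):
    csv_path = builder.export_decklist_csv()          # fallback: export once
base, _ = os.path.splitext(csv_path)
txt_path = getattr(builder, "last_txt_path", None)
if not (isinstance(txt_path, str) and os.path.isfile(txt_path)):
    txt_path = builder.export_decklist_text(filename=os.path.basename(base) + ".txt")
# Reusing the recorded paths avoids writing suffixed duplicates such as <stem>_1.csv / <stem>_1.txt.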
@@ -55,6 +66,8 @@ This format follows Keep a Changelog principles and aims for Semantic Versioning
- Removed redundant template environment instantiation causing inconsistent navigation state.
- Ensured preview cache key includes catalog ETag to prevent stale sample reuse after catalog reload.
- Explicit cache bust after tagging/catalog rebuild prevents stale preview exposure.
- Random build duplicate export issue resolved: suppression of the initial builder auto-export prevents creation of suffixed duplicate decklists.
- Random Mode UI regressions (deck summary toggle & hover preview) fixed by replacing deferred script execution with inline handlers and an HTMX load hook.

### Editorial / Themes
- Enforce minimum `example_commanders` threshold (>=5) in CI; lint fails builds when a non-alias theme drops below threshold.

@@ -198,6 +198,7 @@ To force a new snapshot even when the content hash matches the latest, pass `--f
- RANDOM_MODES=1 (enable random build endpoints)
- RANDOM_UI=1 (show Surprise/Theme/Reroll/Share controls)
- RANDOM_MAX_ATTEMPTS=5 (cap retry attempts)
- (Upcoming) Multi-theme inputs: once the UI ships, Random Mode will accept `primary_theme`, `secondary_theme`, and `tertiary_theme` fields; the current backend already supports the cascade and diagnostics (see the sketch after this list).
- RANDOM_TIMEOUT_MS=5000 (per-build timeout in ms)

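A minimal sketch of the multi-theme backend call that the upcoming UI will drive (function and field names are from this commit; the theme names and seed are placeholders, and remaining parameters are assumed to keep their defaults):

from deck_builder.random_entrypoint import build_random_deck

res = build_random_deck(
    primary_theme="Tokens",
    secondary_theme="Sacrifice",
    tertiary_theme="Aristocrats",
    seed=2025,
)
print(res.resolved_themes)                       # the AND-combination actually used
print(res.combo_fallback, res.synergy_fallback)  # True when themes were dropped / broadened
print(res.fallback_reason)                       # human-readable reason, or None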
Testing/determinism helper (dev):
README.md (BIN) — Binary file not shown.
@@ -3,7 +3,11 @@
## Unreleased (Draft)

### Added
- Random Mode multi-theme groundwork: the backend accepts `primary_theme`, `secondary_theme`, and `tertiary_theme` and computes a resolved combination with ordered fallback (triple → P+S → P+T → P → synergy token overlap → full pool). Exposes diagnostics (`resolved_themes`, `combo_fallback`, `synergy_fallback`, `fallback_reason`) for the upcoming UI integration.
- Locked commander reroll now outputs the full export artifact set (CSV, TXT, compliance, summary) with duplicate prevention.
- Taxonomy snapshot utility (`python -m code.scripts.snapshot_taxonomy`): captures an auditable JSON of BRACKET_DEFINITIONS under `logs/taxonomy_snapshots/` with a content hash. Safe to run any time; subsequent identical snapshots are skipped.
- Random Full Build export parity: random full builds now emit the full artifact set (`.csv`, `.txt`, `_compliance.json`, `.summary.json`) matching standard builds; the API includes `csv_path`, `txt_path`, and `compliance` path fields.
- Opt-out env var `RANDOM_BUILD_SUPPRESS_INITIAL_EXPORT` (defaults to suppressed) allows re-enabling the legacy double-export for debugging when set to `0`.
- Optional adaptive splash penalty (experiment): enable with `SPLASH_ADAPTIVE=1`; scale per commander color count with `SPLASH_ADAPTIVE_SCALE` (default `1:1.0,2:1.0,3:1.0,4:0.6,5:0.35`). Reasons are emitted as `splash_off_color_penalty_adaptive:<colors>:<value>` (see the parsing sketch after this list).
- Analytics: splash penalty counters recognize both static and adaptive reasons; compare deltas with the flag toggled.
- Theme picker performance: precomputed summary projections + lowercase haystacks and a memoized filtered-slug cache (keyed by (etag, q, archetype, bucket, colors)) for sub-50ms typical list queries on the warm path.

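For reference, a small sketch of parsing the `SPLASH_ADAPTIVE_SCALE` format (the variable name and default come from this commit; the helper itself is illustrative, not the project's implementation):

import os

def parse_splash_scale(raw: str) -> dict[int, float]:
    # "1:1.0,2:1.0,3:1.0,4:0.6,5:0.35" -> {1: 1.0, 2: 1.0, 3: 1.0, 4: 0.6, 5: 0.35}
    scale: dict[int, float] = {}
    for part in raw.split(","):
        if ":" not in part:
            continue
        colors, value = part.split(":", 1)
        try:
            scale[int(colors.strip())] = float(value.strip())
        except ValueError:
            continue  # skip malformed entries rather than failing the build
    return scale

print(parse_splash_scale(os.getenv("SPLASH_ADAPTIVE_SCALE", "1:1.0,2:1.0,3:1.0,4:0.6,5:0.35")))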
@@ -20,7 +24,9 @@
- Server authoritative mana & color identity fields (`mana_cost`, `color_identity_list`, `pip_colors`) included in preview/export; legacy client parsers removed.

### Changed
- Random reroll export logic deduplicated by persisting `last_csv_path` / `last_txt_path` from headless runs; avoids creation of `*_1` suffixed artifacts on reroll.
- Splash analytics updated to count both static and adaptive penalty reasons via a shared prefix, keeping historical dashboards intact.
- Random full builds internally auto-set `RANDOM_BUILD_SUPPRESS_INITIAL_EXPORT=1` (unless explicitly provided) to eliminate duplicate suffixed decklists.
- Preview assembly now pins curated `example_cards`, then `synergy_example_cards`, before heuristic sampling with diversity quotas (~40% payoff, 40% enabler/support, 20% wildcard) and synthetic placeholders only when underfilled.
- List & API filtering route migrated to the optimized path, avoiding repeated concatenation / casefolding work on each request (see the caching sketch after this list).
- Hover system consolidated to one global panel; removed the fragment-specific duplicate and the legacy large-image hover. Thumbnails enlarged & unified (110px → 165px → 230px). Hover activation limited to thumbnails; stability improved (no dismissal over the flip control); DFC markup simplified to a single <img> with opacity transition.

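A minimal sketch of the memoization idea behind the optimized filtering path; the key shape (etag, q, archetype, bucket, colors) comes from this commit, while the function names below are illustrative placeholders:

from functools import lru_cache

@lru_cache(maxsize=512)
def filter_slugs_cached(etag: str, q: str, archetype: str, bucket: str, colors: tuple) -> tuple:
    # Keying on the catalog ETag makes old entries unreachable after a catalog reload,
    # so stale results are never served; the other arguments are simple hashable filters.
    return tuple(slug for slug in _catalog_slugs(etag) if _matches(slug, q, archetype, bucket, colors))

def _catalog_slugs(etag: str) -> list:
    return []    # placeholder: would return the slug list for this catalog version

def _matches(slug, q, archetype, bucket, colors) -> bool:
    return True  # placeholder: fast precomputed lowercase-haystack comparisons go here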
@@ -32,6 +38,7 @@
### Fixed
- Resolved duplicate template environment instantiation causing inconsistent navigation globals in picker fragments.
- Ensured preview cache key includes catalog ETag preventing stale samples after catalog reload.
- Random build duplicate decklist exports removed; suppression of the initial builder auto-export prevents creation of `*_1.csv` / `*_1.txt` artifacts.

---

|
|
@ -183,73 +183,94 @@ class DeckBuilder(
|
|||
except Exception:
|
||||
pass
|
||||
if hasattr(self, 'export_decklist_csv'):
|
||||
# If user opted out of owned-only, silently load all owned files for marking
|
||||
try:
|
||||
if not self.use_owned_only and not self.owned_card_names:
|
||||
self._load_all_owned_silent()
|
||||
except Exception:
|
||||
pass
|
||||
csv_path = self.export_decklist_csv()
|
||||
suppress_export = False
|
||||
try:
|
||||
import os as _os
|
||||
base, _ext = _os.path.splitext(_os.path.basename(csv_path))
|
||||
txt_path = self.export_decklist_text(filename=base + '.txt') # type: ignore[attr-defined]
|
||||
# Display the text file contents for easy copy/paste to online deck builders
|
||||
self._display_txt_contents(txt_path)
|
||||
# Compute bracket compliance and save a JSON report alongside exports
|
||||
suppress_export = _os.getenv('RANDOM_BUILD_SUPPRESS_INITIAL_EXPORT') == '1'
|
||||
except Exception:
|
||||
suppress_export = False
|
||||
if not suppress_export:
|
||||
# If user opted out of owned-only, silently load all owned files for marking
|
||||
try:
|
||||
if hasattr(self, 'compute_and_print_compliance'):
|
||||
report0 = self.compute_and_print_compliance(base_stem=base) # type: ignore[attr-defined]
|
||||
# If non-compliant and interactive, offer enforcement now
|
||||
if not self.use_owned_only and not self.owned_card_names:
|
||||
self._load_all_owned_silent()
|
||||
except Exception:
|
||||
pass
|
||||
csv_path = self.export_decklist_csv()
|
||||
# Persist CSV path immediately (before any later potential exceptions)
|
||||
try:
|
||||
self.last_csv_path = csv_path # type: ignore[attr-defined]
|
||||
except Exception:
|
||||
pass
|
||||
try:
|
||||
import os as _os
|
||||
base, _ext = _os.path.splitext(_os.path.basename(csv_path))
|
||||
txt_path = self.export_decklist_text(filename=base + '.txt') # type: ignore[attr-defined]
|
||||
try:
|
||||
self.last_txt_path = txt_path # type: ignore[attr-defined]
|
||||
except Exception:
|
||||
pass
|
||||
# Display the text file contents for easy copy/paste to online deck builders
|
||||
self._display_txt_contents(txt_path)
|
||||
# Compute bracket compliance and save a JSON report alongside exports
|
||||
try:
|
||||
if hasattr(self, 'compute_and_print_compliance'):
|
||||
report0 = self.compute_and_print_compliance(base_stem=base) # type: ignore[attr-defined]
|
||||
# If non-compliant and interactive, offer enforcement now
|
||||
try:
|
||||
if isinstance(report0, dict) and report0.get('overall') == 'FAIL' and not getattr(self, 'headless', False):
|
||||
from deck_builder.phases.phase6_reporting import ReportingMixin as _RM # type: ignore
|
||||
if isinstance(self, _RM) and hasattr(self, 'enforce_and_reexport'):
|
||||
self.output_func("One or more bracket limits exceeded. Enter to auto-resolve, or Ctrl+C to skip.")
|
||||
try:
|
||||
_ = self.input_func("")
|
||||
except Exception:
|
||||
pass
|
||||
self.enforce_and_reexport(base_stem=base, mode='prompt') # type: ignore[attr-defined]
|
||||
except Exception:
|
||||
pass
|
||||
except Exception:
|
||||
pass
|
||||
# If owned-only build is incomplete, generate recommendations
|
||||
try:
|
||||
total_cards = sum(int(v.get('Count', 1)) for v in self.card_library.values())
|
||||
if self.use_owned_only and total_cards < 100:
|
||||
missing = 100 - total_cards
|
||||
rec_limit = int(math.ceil(1.5 * float(missing)))
|
||||
self._generate_recommendations(base_stem=base, limit=rec_limit)
|
||||
except Exception:
|
||||
pass
|
||||
# Also export a matching JSON config for replay (interactive builds only)
|
||||
if not getattr(self, 'headless', False):
|
||||
try:
|
||||
if isinstance(report0, dict) and report0.get('overall') == 'FAIL' and not getattr(self, 'headless', False):
|
||||
from deck_builder.phases.phase6_reporting import ReportingMixin as _RM # type: ignore
|
||||
if isinstance(self, _RM) and hasattr(self, 'enforce_and_reexport'):
|
||||
self.output_func("One or more bracket limits exceeded. Enter to auto-resolve, or Ctrl+C to skip.")
|
||||
try:
|
||||
_ = self.input_func("")
|
||||
except Exception:
|
||||
pass
|
||||
self.enforce_and_reexport(base_stem=base, mode='prompt') # type: ignore[attr-defined]
|
||||
import os as _os
|
||||
cfg_path_env = _os.getenv('DECK_CONFIG')
|
||||
cfg_dir = None
|
||||
if cfg_path_env:
|
||||
cfg_dir = _os.path.dirname(cfg_path_env) or '.'
|
||||
elif _os.path.isdir('/app/config'):
|
||||
cfg_dir = '/app/config'
|
||||
else:
|
||||
cfg_dir = 'config'
|
||||
if cfg_dir:
|
||||
_os.makedirs(cfg_dir, exist_ok=True)
|
||||
self.export_run_config_json(directory=cfg_dir, filename=base + '.json') # type: ignore[attr-defined]
|
||||
if cfg_path_env:
|
||||
cfg_dir2 = _os.path.dirname(cfg_path_env) or '.'
|
||||
cfg_name2 = _os.path.basename(cfg_path_env)
|
||||
_os.makedirs(cfg_dir2, exist_ok=True)
|
||||
self.export_run_config_json(directory=cfg_dir2, filename=cfg_name2) # type: ignore[attr-defined]
|
||||
except Exception:
|
||||
pass
|
||||
except Exception:
|
||||
pass
|
||||
# If owned-only build is incomplete, generate recommendations
|
||||
logger.warning("Plaintext export failed (non-fatal)")
|
||||
else:
|
||||
# Mark suppression so random flow knows nothing was exported yet
|
||||
try:
|
||||
total_cards = sum(int(v.get('Count', 1)) for v in self.card_library.values())
|
||||
if self.use_owned_only and total_cards < 100:
|
||||
missing = 100 - total_cards
|
||||
rec_limit = int(math.ceil(1.5 * float(missing)))
|
||||
self._generate_recommendations(base_stem=base, limit=rec_limit)
|
||||
self.last_csv_path = None # type: ignore[attr-defined]
|
||||
self.last_txt_path = None # type: ignore[attr-defined]
|
||||
except Exception:
|
||||
pass
|
||||
# Also export a matching JSON config for replay (interactive builds only)
|
||||
if not getattr(self, 'headless', False):
|
||||
try:
|
||||
# Choose config output dir: DECK_CONFIG dir > /app/config > ./config
|
||||
import os as _os
|
||||
cfg_path_env = _os.getenv('DECK_CONFIG')
|
||||
cfg_dir = None
|
||||
if cfg_path_env:
|
||||
cfg_dir = _os.path.dirname(cfg_path_env) or '.'
|
||||
elif _os.path.isdir('/app/config'):
|
||||
cfg_dir = '/app/config'
|
||||
else:
|
||||
cfg_dir = 'config'
|
||||
if cfg_dir:
|
||||
_os.makedirs(cfg_dir, exist_ok=True)
|
||||
self.export_run_config_json(directory=cfg_dir, filename=base + '.json') # type: ignore[attr-defined]
|
||||
# Also, if DECK_CONFIG explicitly points to a file path, write exactly there too
|
||||
if cfg_path_env:
|
||||
cfg_dir2 = _os.path.dirname(cfg_path_env) or '.'
|
||||
cfg_name2 = _os.path.basename(cfg_path_env)
|
||||
_os.makedirs(cfg_dir2, exist_ok=True)
|
||||
self.export_run_config_json(directory=cfg_dir2, filename=cfg_name2) # type: ignore[attr-defined]
|
||||
except Exception:
|
||||
pass
|
||||
except Exception:
|
||||
logger.warning("Plaintext export failed (non-fatal)")
|
||||
# If owned-only and deck not complete, print a note
|
||||
try:
|
||||
if self.use_owned_only:
|
||||
|
|
|
|||
|
|
@ -10,12 +10,37 @@ from deck_builder import builder_constants as bc
|
|||
from random_util import get_random, generate_seed
|
||||
|
||||
|
||||
class RandomBuildError(Exception):
|
||||
pass
|
||||
|
||||
|
||||
class RandomConstraintsImpossibleError(RandomBuildError):
|
||||
def __init__(self, message: str, *, constraints: Optional[Dict[str, Any]] = None, pool_size: Optional[int] = None):
|
||||
super().__init__(message)
|
||||
self.constraints = constraints or {}
|
||||
self.pool_size = int(pool_size or 0)
|
||||
|
||||
|
||||
@dataclass
|
||||
class RandomBuildResult:
|
||||
seed: int
|
||||
commander: str
|
||||
theme: Optional[str]
|
||||
constraints: Optional[Dict[str, Any]]
|
||||
# Extended multi-theme support
|
||||
primary_theme: Optional[str] = None
|
||||
secondary_theme: Optional[str] = None
|
||||
tertiary_theme: Optional[str] = None
|
||||
resolved_themes: List[str] | None = None # actual AND-combination used for filtering (case-preserved)
|
||||
# Diagnostics / fallback metadata
|
||||
theme_fallback: bool = False # original single-theme fallback (legacy)
|
||||
original_theme: Optional[str] = None
|
||||
combo_fallback: bool = False # when we had to drop one or more secondary/tertiary themes
|
||||
synergy_fallback: bool = False # when primary itself had no matches and we broadened based on loose overlap
|
||||
fallback_reason: Optional[str] = None
|
||||
attempts_tried: int = 0
|
||||
timeout_hit: bool = False
|
||||
retries_exhausted: bool = False
|
||||
|
||||
def to_dict(self) -> Dict[str, Any]:
|
||||
return {
|
||||
|
|
@ -34,20 +59,155 @@ def _load_commanders_df() -> pd.DataFrame:
|
|||
return pd.read_csv(bc.COMMANDER_CSV_PATH, converters=getattr(bc, "COMMANDER_CONVERTERS", None))
|
||||
|
||||
|
||||
def _filter_by_theme(df: pd.DataFrame, theme: Optional[str]) -> pd.DataFrame:
|
||||
if not theme:
|
||||
return df
|
||||
t = str(theme).strip().lower()
|
||||
def _normalize_tag(value: Optional[str]) -> Optional[str]:
|
||||
if value is None:
|
||||
return None
|
||||
v = str(value).strip()
|
||||
return v if v else None
|
||||
|
||||
|
||||
def _filter_multi(df: pd.DataFrame, primary: Optional[str], secondary: Optional[str], tertiary: Optional[str]) -> tuple[pd.DataFrame, Dict[str, Any]]:
|
||||
"""Return filtered commander dataframe based on ordered fallback strategy.
|
||||
|
||||
Strategy (P = primary, S = secondary, T = tertiary):
|
||||
1. If all P,S,T provided → try P&S&T
|
||||
2. If no triple match → try P&S
|
||||
3. If no P&S → try P&T (treat tertiary as secondary weight-wise)
|
||||
4. If no P+{S|T} → try P alone
|
||||
5. If P alone empty → attempt loose synergy fallback (any commander whose themeTags share a word with P)
|
||||
6. Else full pool fallback (ultimate guard)
|
||||
|
||||
Returns (filtered_df, diagnostics_dict)
|
||||
diagnostics_dict keys:
|
||||
- resolved_themes: list[str]
|
||||
- combo_fallback: bool
|
||||
- synergy_fallback: bool
|
||||
- fallback_reason: str | None
|
||||
"""
|
||||
diag: Dict[str, Any] = {
|
||||
"resolved_themes": None,
|
||||
"combo_fallback": False,
|
||||
"synergy_fallback": False,
|
||||
"fallback_reason": None,
|
||||
}
|
||||
# Normalize to lowercase for comparison but preserve original for reporting
|
||||
p = _normalize_tag(primary)
|
||||
s = _normalize_tag(secondary)
|
||||
t = _normalize_tag(tertiary)
|
||||
# Helper to test AND-combo
|
||||
def and_filter(req: List[str]) -> pd.DataFrame:
|
||||
if not req:
|
||||
return df
|
||||
req_l = [r.lower() for r in req]
|
||||
try:
|
||||
mask = df.get("themeTags").apply(lambda tags: all(any(str(x).strip().lower() == r for x in (tags or [])) for r in req_l))
|
||||
return df[mask]
|
||||
except Exception:
|
||||
return df.iloc[0:0]
|
||||
|
||||
# 1. Triple
|
||||
if p and s and t:
|
||||
triple = and_filter([p, s, t])
|
||||
if len(triple) > 0:
|
||||
diag["resolved_themes"] = [p, s, t]
|
||||
return triple, diag
|
||||
# 2. P+S
|
||||
if p and s:
|
||||
ps = and_filter([p, s])
|
||||
if len(ps) > 0:
|
||||
if t:
|
||||
diag["combo_fallback"] = True
|
||||
diag["fallback_reason"] = "No commanders matched all three themes; using Primary+Secondary"
|
||||
diag["resolved_themes"] = [p, s]
|
||||
return ps, diag
|
||||
# 3. P+T
|
||||
if p and t:
|
||||
pt = and_filter([p, t])
|
||||
if len(pt) > 0:
|
||||
if s:
|
||||
diag["combo_fallback"] = True
|
||||
diag["fallback_reason"] = "No commanders matched requested combinations; using Primary+Tertiary"
|
||||
diag["resolved_themes"] = [p, t]
|
||||
return pt, diag
|
||||
# 4. P only
|
||||
if p:
|
||||
p_only = and_filter([p])
|
||||
if len(p_only) > 0:
|
||||
if s or t:
|
||||
diag["combo_fallback"] = True
|
||||
diag["fallback_reason"] = "No multi-theme combination matched; using Primary only"
|
||||
diag["resolved_themes"] = [p]
|
||||
return p_only, diag
|
||||
# 5. Synergy fallback based on primary token overlaps
|
||||
if p:
|
||||
words = [w for w in p.replace('-', ' ').split() if w]
|
||||
if words:
|
||||
try:
|
||||
mask = df.get("themeTags").apply(
|
||||
lambda tags: any(
|
||||
any(w == str(x).strip().lower() or w in str(x).strip().lower() for w in words)
|
||||
for x in (tags or [])
|
||||
)
|
||||
)
|
||||
synergy_df = df[mask]
|
||||
if len(synergy_df) > 0:
|
||||
diag["resolved_themes"] = words # approximate overlap tokens
|
||||
diag["combo_fallback"] = True
|
||||
diag["synergy_fallback"] = True
|
||||
diag["fallback_reason"] = "Primary theme had no direct matches; using synergy overlap"
|
||||
return synergy_df, diag
|
||||
except Exception:
|
||||
pass
|
||||
# 6. Full pool fallback
|
||||
diag["resolved_themes"] = []
|
||||
diag["combo_fallback"] = True
|
||||
diag["synergy_fallback"] = True
|
||||
diag["fallback_reason"] = "No theme matches found; using full commander pool"
|
||||
return df, diag
|
||||
|
||||
|
||||
def _candidate_ok(candidate: str, constraints: Optional[Dict[str, Any]]) -> bool:
|
||||
"""Check simple feasibility filters from constraints.
|
||||
|
||||
Supported keys (lightweight, safe defaults):
|
||||
- reject_all: bool -> if True, reject every candidate (useful for retries-exhausted tests)
|
||||
- reject_names: list[str] -> reject these specific names
|
||||
"""
|
||||
    if not constraints:
        return True
    try:
        if constraints.get("reject_all"):
            return False
    except Exception:
        pass
    try:
        rej = constraints.get("reject_names")
        if isinstance(rej, (list, tuple)) and any(str(candidate) == str(x) for x in rej):
            return False
    except Exception:
        pass
    return True

# Removed in this change (remainder of the legacy single-theme _filter_by_theme body,
# superseded by _filter_multi above):
#     try:
#         mask = df.get("themeTags").apply(
#             lambda tags: any(str(x).strip().lower() == t for x in (tags or []))
#         )
#         sub = df[mask]
#         if len(sub) > 0:
#             return sub
#     except Exception:
#         pass
#     return df
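# Illustrative behavior of the constraint keys documented above (not part of this module):
#   _candidate_ok("Some Commander", None)                                   -> True
#   _candidate_ok("Some Commander", {"reject_all": True})                   -> False
#   _candidate_ok("Some Commander", {"reject_names": ["Some Commander"]})   -> False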
|
||||
|
||||
|
||||
def _check_constraints(candidate_count: int, constraints: Optional[Dict[str, Any]]) -> None:
|
||||
if not constraints:
|
||||
return
|
||||
try:
|
||||
req_min = constraints.get("require_min_candidates") # type: ignore[attr-defined]
|
||||
except Exception:
|
||||
req_min = None
|
||||
if req_min is None:
|
||||
return
|
||||
try:
|
||||
req_min_int = int(req_min)
|
||||
except Exception:
|
||||
req_min_int = None
|
||||
if req_min_int is not None and candidate_count < req_min_int:
|
||||
raise RandomConstraintsImpossibleError(
|
||||
f"Not enough candidates to satisfy constraints (have {candidate_count}, require >= {req_min_int})",
|
||||
constraints=constraints,
|
||||
pool_size=candidate_count,
|
||||
)
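# Illustrative only: requesting more candidates than the filtered pool provides
# surfaces a structured error rather than a silent fallback.
#   try:
#       _check_constraints(candidate_count=3, constraints={"require_min_candidates": 10})
#   except RandomConstraintsImpossibleError as exc:
#       print(exc.pool_size, exc.constraints)   # 3 {'require_min_candidates': 10}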
|
||||
|
||||
|
||||
def build_random_deck(
|
||||
|
|
@ -56,6 +216,10 @@ def build_random_deck(
|
|||
seed: Optional[int | str] = None,
|
||||
attempts: int = 5,
|
||||
timeout_s: float = 5.0,
|
||||
# New multi-theme inputs (theme retained for backward compatibility as primary)
|
||||
primary_theme: Optional[str] = None,
|
||||
secondary_theme: Optional[str] = None,
|
||||
tertiary_theme: Optional[str] = None,
|
||||
) -> RandomBuildResult:
|
||||
"""Thin wrapper for random selection of a commander, deterministic when seeded.
|
||||
|
||||
|
|
@ -84,9 +248,17 @@ def build_random_deck(
|
|||
timeout_s = 5.0
|
||||
timeout_s = max(0.1, timeout_s)
|
||||
|
||||
# Load commander pool and apply theme filter (if any)
|
||||
# Resolve multi-theme inputs
|
||||
if primary_theme is None:
|
||||
primary_theme = theme # legacy single theme becomes primary
|
||||
df_all = _load_commanders_df()
|
||||
df = _filter_by_theme(df_all, theme)
|
||||
df, multi_diag = _filter_multi(df_all, primary_theme, secondary_theme, tertiary_theme)
|
||||
used_fallback = False
|
||||
original_theme = None
|
||||
if multi_diag.get("combo_fallback") or multi_diag.get("synergy_fallback"):
|
||||
# For legacy fields
|
||||
used_fallback = bool(multi_diag.get("combo_fallback"))
|
||||
original_theme = primary_theme if primary_theme else None
|
||||
# Stable ordering then seeded selection for deterministic behavior
|
||||
names: List[str] = sorted(df["name"].astype(str).tolist()) if not df.empty else []
|
||||
if not names:
|
||||
|
|
@ -96,22 +268,49 @@ def build_random_deck(
|
|||
# Absolute fallback for pathological cases
|
||||
names = ["Unknown Commander"]
|
||||
|
||||
# Constraint feasibility check (based on candidate count)
|
||||
_check_constraints(len(names), constraints)
|
||||
|
||||
# Simple attempt/timeout loop (placeholder for future constraints checks)
|
||||
start = time.time()
|
||||
pick = None
|
||||
for _ in range(attempts):
|
||||
attempts_tried = 0
|
||||
timeout_hit = False
|
||||
for i in range(attempts):
|
||||
if (time.time() - start) > timeout_s:
|
||||
timeout_hit = True
|
||||
break
|
||||
attempts_tried = i + 1
|
||||
idx = rng.randrange(0, len(names))
|
||||
candidate = names[idx]
|
||||
# For now, accept the first candidate; constraint hooks can be added here.
|
||||
pick = candidate
|
||||
break
|
||||
# Accept only if candidate passes simple feasibility filters
|
||||
if _candidate_ok(candidate, constraints):
|
||||
pick = candidate
|
||||
break
|
||||
# else continue and try another candidate until attempts/timeout
|
||||
retries_exhausted = (pick is None) and (not timeout_hit) and (attempts_tried >= attempts)
|
||||
if pick is None:
|
||||
# Timeout/attempts exhausted; choose deterministically based on seed modulo
|
||||
pick = names[resolved_seed % len(names)]
|
||||
|
||||
return RandomBuildResult(seed=int(resolved_seed), commander=pick, theme=theme, constraints=constraints or {})
|
||||
return RandomBuildResult(
|
||||
seed=int(resolved_seed),
|
||||
commander=pick,
|
||||
theme=primary_theme, # preserve prior contract
|
||||
constraints=constraints or {},
|
||||
primary_theme=primary_theme,
|
||||
secondary_theme=secondary_theme,
|
||||
tertiary_theme=tertiary_theme,
|
||||
resolved_themes=list(multi_diag.get("resolved_themes") or []),
|
||||
combo_fallback=bool(multi_diag.get("combo_fallback")),
|
||||
synergy_fallback=bool(multi_diag.get("synergy_fallback")),
|
||||
fallback_reason=multi_diag.get("fallback_reason"),
|
||||
theme_fallback=bool(used_fallback),
|
||||
original_theme=original_theme,
|
||||
attempts_tried=int(attempts_tried or (1 if pick else 0)),
|
||||
timeout_hit=bool(timeout_hit),
|
||||
retries_exhausted=bool(retries_exhausted),
|
||||
)
|
||||
|
||||
|
||||
__all__ = [
|
||||
|
|
@ -125,6 +324,10 @@ __all__ = [
|
|||
class RandomFullBuildResult(RandomBuildResult):
|
||||
decklist: List[Dict[str, Any]] | None = None
|
||||
diagnostics: Dict[str, Any] | None = None
|
||||
summary: Dict[str, Any] | None = None
|
||||
csv_path: str | None = None
|
||||
txt_path: str | None = None
|
||||
compliance: Dict[str, Any] | None = None
|
||||
|
||||
|
||||
def build_random_full_deck(
|
||||
|
|
@ -138,6 +341,7 @@ def build_random_full_deck(
|
|||
|
||||
Returns a compact result including the seed, commander, and a summarized decklist.
|
||||
"""
|
||||
t0 = time.time()
|
||||
base = build_random_deck(theme=theme, constraints=constraints, seed=seed, attempts=attempts, timeout_s=timeout_s)
|
||||
|
||||
# Run the full headless build with the chosen commander and the same seed
|
||||
|
|
@ -153,9 +357,148 @@ def build_random_full_deck(
|
|||
diagnostics={"error": f"headless runner unavailable: {e}"},
|
||||
)
|
||||
|
||||
# Run the full builder once; reuse object for summary + deck extraction
|
||||
# Default behavior: suppress the initial internal export so Random build controls artifacts.
|
||||
# (If user explicitly sets RANDOM_BUILD_SUPPRESS_INITIAL_EXPORT=0 we respect that.)
|
||||
try:
|
||||
import os as _os
|
||||
if _os.getenv('RANDOM_BUILD_SUPPRESS_INITIAL_EXPORT') is None:
|
||||
_os.environ['RANDOM_BUILD_SUPPRESS_INITIAL_EXPORT'] = '1'
|
||||
except Exception:
|
||||
pass
|
||||
builder = _run(command_name=base.commander, seed=base.seed)
|
||||
|
||||
# Summarize the decklist from builder.card_library
|
||||
# Build summary (may fail gracefully)
|
||||
summary: Dict[str, Any] | None = None
|
||||
try:
|
||||
if hasattr(builder, 'build_deck_summary'):
|
||||
summary = builder.build_deck_summary() # type: ignore[attr-defined]
|
||||
except Exception:
|
||||
summary = None
|
||||
|
||||
# Attempt to reuse existing export performed inside builder (headless run already exported)
|
||||
csv_path: str | None = None
|
||||
txt_path: str | None = None
|
||||
compliance: Dict[str, Any] | None = None
|
||||
try:
|
||||
import os as _os
|
||||
import json as _json
|
||||
csv_path = getattr(builder, 'last_csv_path', None) # type: ignore[attr-defined]
|
||||
txt_path = getattr(builder, 'last_txt_path', None) # type: ignore[attr-defined]
|
||||
if csv_path and isinstance(csv_path, str):
|
||||
base_path, _ = _os.path.splitext(csv_path)
|
||||
# If txt missing but expected, look for sibling
|
||||
if (not txt_path or not _os.path.isfile(str(txt_path))) and _os.path.isfile(base_path + '.txt'):
|
||||
txt_path = base_path + '.txt'
|
||||
# Load existing compliance if present
|
||||
comp_path = base_path + '_compliance.json'
|
||||
if _os.path.isfile(comp_path):
|
||||
try:
|
||||
with open(comp_path, 'r', encoding='utf-8') as _cf:
|
||||
compliance = _json.load(_cf)
|
||||
except Exception:
|
||||
compliance = None
|
||||
else:
|
||||
# Compute compliance if not already saved
|
||||
try:
|
||||
if hasattr(builder, 'compute_and_print_compliance'):
|
||||
compliance = builder.compute_and_print_compliance(base_stem=_os.path.basename(base_path)) # type: ignore[attr-defined]
|
||||
except Exception:
|
||||
compliance = None
|
||||
# Write summary sidecar if missing
|
||||
if summary:
|
||||
sidecar = base_path + '.summary.json'
|
||||
if not _os.path.isfile(sidecar):
|
||||
meta = {
|
||||
"commander": getattr(builder, 'commander_name', '') or getattr(builder, 'commander', ''),
|
||||
"tags": list(getattr(builder, 'selected_tags', []) or []) or [t for t in [getattr(builder, 'primary_tag', None), getattr(builder, 'secondary_tag', None), getattr(builder, 'tertiary_tag', None)] if t],
|
||||
"bracket_level": getattr(builder, 'bracket_level', None),
|
||||
"csv": csv_path,
|
||||
"txt": txt_path,
|
||||
"random_seed": base.seed,
|
||||
"random_theme": base.theme,
|
||||
"random_constraints": base.constraints or {},
|
||||
}
|
||||
try:
|
||||
custom_base = getattr(builder, 'custom_export_base', None)
|
||||
except Exception:
|
||||
custom_base = None
|
||||
if isinstance(custom_base, str) and custom_base.strip():
|
||||
meta["name"] = custom_base.strip()
|
||||
try:
|
||||
with open(sidecar, 'w', encoding='utf-8') as f:
|
||||
_json.dump({"meta": meta, "summary": summary}, f, ensure_ascii=False, indent=2)
|
||||
except Exception:
|
||||
pass
|
||||
else:
|
||||
# Fallback: export now (rare path if headless build skipped export)
|
||||
if hasattr(builder, 'export_decklist_csv'):
|
||||
try:
|
||||
# Before exporting, attempt to find an existing same-day base file (non-suffixed) to avoid duplicate export
|
||||
existing_base: str | None = None
|
||||
try:
|
||||
import glob as _glob
|
||||
today = time.strftime('%Y%m%d')
|
||||
# Commander slug approximation: remove non alnum underscores
|
||||
import re as _re
|
||||
cmdr = (getattr(builder, 'commander_name', '') or getattr(builder, 'commander', '') or '')
|
||||
slug = _re.sub(r'[^A-Za-z0-9_]+', '', cmdr) or 'deck'
|
||||
pattern = f"deck_files/{slug}_*_{today}.csv"
|
||||
for path in sorted(_glob.glob(pattern)):
|
||||
base_name = _os.path.basename(path)
|
||||
if '_1.csv' not in base_name: # prefer original
|
||||
existing_base = path
|
||||
break
|
||||
except Exception:
|
||||
existing_base = None
|
||||
if existing_base and _os.path.isfile(existing_base):
|
||||
csv_path = existing_base
|
||||
base_path, _ = _os.path.splitext(csv_path)
|
||||
else:
|
||||
tmp_csv = builder.export_decklist_csv() # type: ignore[attr-defined]
|
||||
stem_base, ext = _os.path.splitext(tmp_csv)
|
||||
if stem_base.endswith('_1'):
|
||||
original = stem_base[:-2] + ext
|
||||
if _os.path.isfile(original):
|
||||
csv_path = original
|
||||
else:
|
||||
csv_path = tmp_csv
|
||||
else:
|
||||
csv_path = tmp_csv
|
||||
base_path, _ = _os.path.splitext(csv_path)
|
||||
if hasattr(builder, 'export_decklist_text'):
|
||||
target_txt = base_path + '.txt'
|
||||
if _os.path.isfile(target_txt):
|
||||
txt_path = target_txt
|
||||
else:
|
||||
tmp_txt = builder.export_decklist_text(filename=_os.path.basename(base_path) + '.txt') # type: ignore[attr-defined]
|
||||
if tmp_txt.endswith('_1.txt') and _os.path.isfile(target_txt):
|
||||
txt_path = target_txt
|
||||
else:
|
||||
txt_path = tmp_txt
|
||||
if hasattr(builder, 'compute_and_print_compliance'):
|
||||
compliance = builder.compute_and_print_compliance(base_stem=_os.path.basename(base_path)) # type: ignore[attr-defined]
|
||||
if summary:
|
||||
sidecar = base_path + '.summary.json'
|
||||
if not _os.path.isfile(sidecar):
|
||||
meta = {
|
||||
"commander": getattr(builder, 'commander_name', '') or getattr(builder, 'commander', ''),
|
||||
"tags": list(getattr(builder, 'selected_tags', []) or []) or [t for t in [getattr(builder, 'primary_tag', None), getattr(builder, 'secondary_tag', None), getattr(builder, 'tertiary_tag', None)] if t],
|
||||
"bracket_level": getattr(builder, 'bracket_level', None),
|
||||
"csv": csv_path,
|
||||
"txt": txt_path,
|
||||
"random_seed": base.seed,
|
||||
"random_theme": base.theme,
|
||||
"random_constraints": base.constraints or {},
|
||||
}
|
||||
with open(sidecar, 'w', encoding='utf-8') as f:
|
||||
_json.dump({"meta": meta, "summary": summary}, f, ensure_ascii=False, indent=2)
|
||||
except Exception:
|
||||
pass
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
# Extract a simple decklist (name/count)
|
||||
deck_items: List[Dict[str, Any]] = []
|
||||
try:
|
||||
lib = getattr(builder, 'card_library', {}) or {}
|
||||
|
|
@ -169,7 +512,15 @@ def build_random_full_deck(
|
|||
except Exception:
|
||||
deck_items = []
|
||||
|
||||
diags: Dict[str, Any] = {"attempts": 1, "timeout_s": timeout_s}
|
||||
elapsed_ms = int((time.time() - t0) * 1000)
|
||||
diags: Dict[str, Any] = {
|
||||
"attempts": int(getattr(base, "attempts_tried", 1) or 1),
|
||||
"timeout_s": float(timeout_s),
|
||||
"elapsed_ms": elapsed_ms,
|
||||
"fallback": bool(base.theme_fallback),
|
||||
"timeout_hit": bool(getattr(base, "timeout_hit", False)),
|
||||
"retries_exhausted": bool(getattr(base, "retries_exhausted", False)),
|
||||
}
|
||||
return RandomFullBuildResult(
|
||||
seed=base.seed,
|
||||
commander=base.commander,
|
||||
|
|
@ -177,5 +528,9 @@ def build_random_full_deck(
|
|||
constraints=base.constraints or {},
|
||||
decklist=deck_items,
|
||||
diagnostics=diags,
|
||||
summary=summary,
|
||||
csv_path=csv_path,
|
||||
txt_path=txt_path,
|
||||
compliance=compliance,
|
||||
)
|
||||
|
||||
|
|
|
|||
|
|
@ -304,15 +304,37 @@ def _export_outputs(builder: DeckBuilder) -> None:
|
|||
csv_path: Optional[str] = None
|
||||
try:
|
||||
csv_path = builder.export_decklist_csv() if hasattr(builder, "export_decklist_csv") else None
|
||||
# Persist for downstream reuse (e.g., random_entrypoint / reroll flows) so they don't re-export
|
||||
if csv_path:
|
||||
try:
|
||||
builder.last_csv_path = csv_path # type: ignore[attr-defined]
|
||||
except Exception:
|
||||
pass
|
||||
except Exception:
|
||||
csv_path = None
|
||||
try:
|
||||
if hasattr(builder, "export_decklist_text"):
|
||||
if csv_path:
|
||||
base = os.path.splitext(os.path.basename(csv_path))[0]
|
||||
builder.export_decklist_text(filename=base + ".txt")
|
||||
txt_generated: Optional[str] = None
|
||||
try:
|
||||
txt_generated = builder.export_decklist_text(filename=base + ".txt")
|
||||
finally:
|
||||
if txt_generated:
|
||||
try:
|
||||
builder.last_txt_path = txt_generated # type: ignore[attr-defined]
|
||||
except Exception:
|
||||
pass
|
||||
else:
|
||||
builder.export_decklist_text()
|
||||
txt_generated = None
|
||||
try:
|
||||
txt_generated = builder.export_decklist_text()
|
||||
finally:
|
||||
if txt_generated:
|
||||
try:
|
||||
builder.last_txt_path = txt_generated # type: ignore[attr-defined]
|
||||
except Exception:
|
||||
pass
|
||||
except Exception:
|
||||
pass
|
||||
if _should_export_json_headless() and hasattr(builder, "export_run_config_json") and csv_path:
|
||||
|
|
|
|||
code/tests/test_random_attempts_and_timeout.py (new file, 77 lines)
|
|
@ -0,0 +1,77 @@
|
|||
from __future__ import annotations
|
||||
|
||||
import importlib
|
||||
import os
|
||||
from starlette.testclient import TestClient
|
||||
|
||||
|
||||
def _mk_client(monkeypatch):
|
||||
# Enable Random Modes and point to test CSVs
|
||||
monkeypatch.setenv("RANDOM_MODES", "1")
|
||||
monkeypatch.setenv("RANDOM_UI", "1")
|
||||
monkeypatch.setenv("CSV_FILES_DIR", os.path.join("csv_files", "testdata"))
|
||||
# Keep defaults small for speed
|
||||
monkeypatch.setenv("RANDOM_MAX_ATTEMPTS", "3")
|
||||
monkeypatch.setenv("RANDOM_TIMEOUT_MS", "200")
|
||||
# Re-import app to pick up env
|
||||
app_module = importlib.import_module('code.web.app')
|
||||
importlib.reload(app_module)
|
||||
return TestClient(app_module.app)
|
||||
|
||||
|
||||
def test_retries_exhausted_flag_propagates(monkeypatch):
|
||||
client = _mk_client(monkeypatch)
|
||||
# Force rejection of every candidate to simulate retries exhaustion
|
||||
payload = {"seed": 1234, "constraints": {"reject_all": True}, "attempts": 2, "timeout_ms": 200}
|
||||
r = client.post('/api/random_full_build', json=payload)
|
||||
assert r.status_code == 200
|
||||
data = r.json()
|
||||
diag = data.get("diagnostics") or {}
|
||||
assert diag.get("attempts") >= 1
|
||||
assert diag.get("retries_exhausted") is True
|
||||
assert diag.get("timeout_hit") in {True, False}
|
||||
|
||||
|
||||
def test_timeout_hit_flag_propagates(monkeypatch):
|
||||
client = _mk_client(monkeypatch)
|
||||
# Force the time source in random_entrypoint to advance rapidly so the loop times out immediately
|
||||
re = importlib.import_module('deck_builder.random_entrypoint')
|
||||
class _FakeClock:
|
||||
def __init__(self):
|
||||
self.t = 0.0
|
||||
def time(self):
|
||||
# Advance time by 0.2s every call
|
||||
self.t += 0.2
|
||||
return self.t
|
||||
fake = _FakeClock()
|
||||
monkeypatch.setattr(re, 'time', fake, raising=True)
|
||||
# Use small timeout and large attempts; timeout path should be taken deterministically
|
||||
payload = {"seed": 4321, "attempts": 1000, "timeout_ms": 100}
|
||||
r = client.post('/api/random_full_build', json=payload)
|
||||
assert r.status_code == 200
|
||||
data = r.json()
|
||||
diag = data.get("diagnostics") or {}
|
||||
assert diag.get("attempts") >= 1
|
||||
assert diag.get("timeout_hit") is True
|
||||
|
||||
|
||||
def test_hx_fragment_includes_diagnostics_when_enabled(monkeypatch):
|
||||
client = _mk_client(monkeypatch)
|
||||
# Enable diagnostics in templates
|
||||
monkeypatch.setenv("SHOW_DIAGNOSTICS", "1")
|
||||
monkeypatch.setenv("RANDOM_UI", "1")
|
||||
app_module = importlib.import_module('code.web.app')
|
||||
importlib.reload(app_module)
|
||||
client = TestClient(app_module.app)
|
||||
|
||||
headers = {
|
||||
"HX-Request": "true",
|
||||
"Content-Type": "application/json",
|
||||
"Accept": "text/html, */*; q=0.1",
|
||||
}
|
||||
r = client.post("/hx/random_reroll", data='{"seed": 10, "constraints": {"reject_all": true}, "attempts": 2, "timeout_ms": 200}', headers=headers)
|
||||
assert r.status_code == 200
|
||||
html = r.text
|
||||
# Should include attempts and at least one of the diagnostics flags text when enabled
|
||||
assert "attempts=" in html
|
||||
assert ("Retries exhausted" in html) or ("Timeout hit" in html)
|
||||
code/tests/test_random_determinism_delta.py (new file, 37 lines)
|
|
@ -0,0 +1,37 @@
|
|||
from __future__ import annotations
|
||||
import importlib
|
||||
import os
|
||||
from starlette.testclient import TestClient
|
||||
|
||||
|
||||
def _client(monkeypatch):
|
||||
monkeypatch.setenv('RANDOM_MODES', '1')
|
||||
monkeypatch.setenv('CSV_FILES_DIR', os.path.join('csv_files', 'testdata'))
|
||||
app_module = importlib.import_module('code.web.app')
|
||||
return TestClient(app_module.app)
|
||||
|
||||
|
||||
def test_same_seed_same_theme_same_constraints_identical(monkeypatch):
|
||||
client = _client(monkeypatch)
|
||||
body = {'seed': 2025, 'theme': 'Tokens'}
|
||||
r1 = client.post('/api/random_full_build', json=body)
|
||||
r2 = client.post('/api/random_full_build', json=body)
|
||||
assert r1.status_code == 200 and r2.status_code == 200
|
||||
d1, d2 = r1.json(), r2.json()
|
||||
assert d1['commander'] == d2['commander']
|
||||
assert d1['decklist'] == d2['decklist']
|
||||
|
||||
|
||||
def test_different_seed_yields_difference(monkeypatch):
|
||||
client = _client(monkeypatch)
|
||||
b1 = {'seed': 1111}
|
||||
b2 = {'seed': 1112}
|
||||
r1 = client.post('/api/random_full_build', json=b1)
|
||||
r2 = client.post('/api/random_full_build', json=b2)
|
||||
assert r1.status_code == 200 and r2.status_code == 200
|
||||
d1, d2 = r1.json(), r2.json()
|
||||
# Commander or at least one decklist difference
|
||||
if d1['commander'] == d2['commander']:
|
||||
assert d1['decklist'] != d2['decklist'], 'Expected decklist difference for different seeds'
|
||||
else:
|
||||
assert True
|
||||
code/tests/test_random_end_to_end_flow.py (new file, 72 lines)
|
|
@ -0,0 +1,72 @@
|
|||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import base64
|
||||
import json
|
||||
from fastapi.testclient import TestClient
|
||||
|
||||
# End-to-end scenario test for Random Modes.
|
||||
# Flow:
|
||||
# 1. Full build with seed S and (optional) theme.
|
||||
# 2. Reroll from that seed (seed+1) and capture deck.
|
||||
# 3. Replay permalink from step 1 (decode token) to reproduce original deck.
|
||||
# Assertions:
|
||||
# - Initial and reproduced decks identical (permalink determinism).
|
||||
# - Reroll seed increments.
|
||||
# - Reroll deck differs from original unless dataset too small (allow equality but tolerate identical for tiny pool).
|
||||
|
||||
|
||||
def _decode_state(token: str) -> dict:
|
||||
pad = "=" * (-len(token) % 4)
|
||||
raw = base64.urlsafe_b64decode((token + pad).encode("ascii")).decode("utf-8")
|
||||
return json.loads(raw)
|
||||
|
||||
|
||||
def test_random_end_to_end_flow(monkeypatch):
|
||||
monkeypatch.setenv("RANDOM_MODES", "1")
|
||||
monkeypatch.setenv("RANDOM_UI", "1")
|
||||
monkeypatch.setenv("CSV_FILES_DIR", os.path.join("csv_files", "testdata"))
|
||||
from code.web.app import app
|
||||
client = TestClient(app)
|
||||
|
||||
seed = 5150
|
||||
# Step 1: Full build
|
||||
r1 = client.post("/api/random_full_build", json={"seed": seed, "theme": "Tokens"})
|
||||
assert r1.status_code == 200, r1.text
|
||||
d1 = r1.json()
|
||||
assert d1.get("seed") == seed
|
||||
deck1 = d1.get("decklist")
|
||||
assert isinstance(deck1, list)
|
||||
permalink = d1.get("permalink")
|
||||
assert permalink and permalink.startswith("/build/from?state=")
|
||||
|
||||
# Step 2: Reroll
|
||||
r2 = client.post("/api/random_reroll", json={"seed": seed})
|
||||
assert r2.status_code == 200, r2.text
|
||||
d2 = r2.json()
|
||||
assert d2.get("seed") == seed + 1
|
||||
deck2 = d2.get("decklist")
|
||||
assert isinstance(deck2, list)
|
||||
|
||||
# Allow equality for tiny dataset; but typically expect difference
|
||||
if d2.get("commander") == d1.get("commander"):
|
||||
# At least one card difference ideally
|
||||
# If exact decklist same, just accept (document small test pool)
|
||||
pass
|
||||
else:
|
||||
assert d2.get("commander") != d1.get("commander") or deck2 != deck1
|
||||
|
||||
# Step 3: Replay permalink
|
||||
token = permalink.split("state=", 1)[1]
|
||||
decoded = _decode_state(token)
|
||||
rnd = decoded.get("random") or {}
|
||||
r3 = client.post("/api/random_full_build", json={
|
||||
"seed": rnd.get("seed"),
|
||||
"theme": rnd.get("theme"),
|
||||
"constraints": rnd.get("constraints"),
|
||||
})
|
||||
assert r3.status_code == 200, r3.text
|
||||
d3 = r3.json()
|
||||
# Deck reproduced
|
||||
assert d3.get("decklist") == deck1
|
||||
assert d3.get("commander") == d1.get("commander")
|
||||
code/tests/test_random_fallback_and_constraints.py (new file, 43 lines)
|
|
@ -0,0 +1,43 @@
|
|||
from __future__ import annotations
|
||||
|
||||
import importlib
|
||||
import os
|
||||
from starlette.testclient import TestClient
|
||||
|
||||
|
||||
def _mk_client(monkeypatch):
|
||||
monkeypatch.setenv("RANDOM_MODES", "1")
|
||||
monkeypatch.setenv("CSV_FILES_DIR", os.path.join("csv_files", "testdata"))
|
||||
app_module = importlib.import_module('code.web.app')
|
||||
return TestClient(app_module.app)
|
||||
|
||||
|
||||
def test_invalid_theme_triggers_fallback_and_echoes_original_theme(monkeypatch):
|
||||
client = _mk_client(monkeypatch)
|
||||
payload = {"seed": 777, "theme": "this theme does not exist"}
|
||||
r = client.post('/api/random_full_build', json=payload)
|
||||
assert r.status_code == 200
|
||||
data = r.json()
|
||||
# Fallback flag should be set with original_theme echoed
|
||||
assert data.get("fallback") is True
|
||||
assert data.get("original_theme") == payload["theme"]
|
||||
# Theme is still the provided theme (we indicate fallback via the flag)
|
||||
assert data.get("theme") == payload["theme"]
|
||||
# Commander/decklist should be present
|
||||
assert isinstance(data.get("commander"), str) and data["commander"]
|
||||
assert isinstance(data.get("decklist"), list)
|
||||
|
||||
|
||||
def test_constraints_impossible_returns_422_with_detail(monkeypatch):
|
||||
client = _mk_client(monkeypatch)
|
||||
# Set an unrealistically high requirement to force impossible constraint
|
||||
payload = {"seed": 101, "constraints": {"require_min_candidates": 1000000}}
|
||||
r = client.post('/api/random_full_build', json=payload)
|
||||
assert r.status_code == 422
|
||||
data = r.json()
|
||||
# Structured error payload
|
||||
assert data.get("status") == 422
|
||||
detail = data.get("detail")
|
||||
assert isinstance(detail, dict)
|
||||
assert detail.get("error") == "constraints_impossible"
|
||||
assert isinstance(detail.get("pool_size"), int)
|
||||
|
|
@ -1,9 +1,32 @@
|
|||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import pytest
|
||||
from fastapi.testclient import TestClient
|
||||
from deck_builder.random_entrypoint import build_random_full_deck
|
||||
|
||||
|
||||
@pytest.fixture(scope="module")
|
||||
def client():
|
||||
os.environ["RANDOM_MODES"] = "1"
|
||||
os.environ["CSV_FILES_DIR"] = os.path.join("csv_files", "testdata")
|
||||
from web.app import app
|
||||
with TestClient(app) as c:
|
||||
yield c
|
||||
|
||||
|
||||
def test_full_build_same_seed_produces_same_deck(client: TestClient):
|
||||
body = {"seed": 4242}
|
||||
r1 = client.post("/api/random_full_build", json=body)
|
||||
assert r1.status_code == 200, r1.text
|
||||
d1 = r1.json()
|
||||
r2 = client.post("/api/random_full_build", json=body)
|
||||
assert r2.status_code == 200, r2.text
|
||||
d2 = r2.json()
|
||||
assert d1.get("seed") == d2.get("seed") == 4242
|
||||
assert d1.get("decklist") == d2.get("decklist")
|
||||
|
||||
|
||||
def test_random_full_build_is_deterministic_on_frozen_dataset(monkeypatch):
|
||||
# Use frozen dataset for determinism
|
||||
monkeypatch.setenv("CSV_FILES_DIR", os.path.join("csv_files", "testdata"))
|
||||
|
|
|
|||
code/tests/test_random_full_build_exports.py (new file, 31 lines)
|
|
@ -0,0 +1,31 @@
|
|||
import os
|
||||
import json
|
||||
from deck_builder.random_entrypoint import build_random_full_deck
|
||||
|
||||
def test_random_full_build_writes_sidecars():
|
||||
# Run build in real project context so CSV inputs exist
|
||||
os.makedirs('deck_files', exist_ok=True)
|
||||
res = build_random_full_deck(theme="Goblin Kindred", seed=12345)
|
||||
assert res.csv_path is not None, "CSV path should be returned"
|
||||
assert os.path.isfile(res.csv_path), f"CSV not found: {res.csv_path}"
|
||||
base, _ = os.path.splitext(res.csv_path)
|
||||
summary_path = base + '.summary.json'
|
||||
assert os.path.isfile(summary_path), "Summary sidecar missing"
|
||||
with open(summary_path,'r',encoding='utf-8') as f:
|
||||
data = json.load(f)
|
||||
assert 'meta' in data and 'summary' in data, "Malformed summary sidecar"
|
||||
comp_path = base + '_compliance.json'
|
||||
# Compliance may be empty dict depending on bracket policy; ensure file exists when compliance object returned
|
||||
if res.compliance:
|
||||
assert os.path.isfile(comp_path), "Compliance file missing despite compliance object"
|
||||
# Basic CSV sanity: contains header Name
|
||||
with open(res.csv_path,'r',encoding='utf-8') as f:
|
||||
head = f.read(200)
|
||||
assert 'Name' in head, "CSV appears malformed"
|
||||
# Cleanup artifacts to avoid polluting workspace (best effort)
|
||||
for p in [res.csv_path, summary_path, comp_path]:
|
||||
try:
|
||||
if os.path.isfile(p):
|
||||
os.remove(p)
|
||||
except Exception:
|
||||
pass
|
||||
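For orientation, the summary sidecar asserted in the test above has roughly this shape (key names are from this commit; the values are placeholders):

sidecar_shape = {
    "meta": {
        "commander": "<commander name>",
        "tags": ["<selected or primary/secondary/tertiary tags>"],
        "bracket_level": None,
        "csv": "deck_files/<stem>.csv",
        "txt": "deck_files/<stem>.txt",
        "random_seed": 12345,
        "random_theme": "Goblin Kindred",
        "random_constraints": {},
    },
    "summary": {},   # output of builder.build_deck_summary()
}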
code/tests/test_random_metrics_and_seed_history.py (new file, 32 lines)
|
|
@ -0,0 +1,32 @@
|
|||
from __future__ import annotations
|
||||
import os
|
||||
from fastapi.testclient import TestClient
|
||||
|
||||
def test_metrics_and_seed_history(monkeypatch):
|
||||
monkeypatch.setenv('RANDOM_MODES', '1')
|
||||
monkeypatch.setenv('RANDOM_UI', '1')
|
||||
monkeypatch.setenv('RANDOM_TELEMETRY', '1')
|
||||
monkeypatch.setenv('CSV_FILES_DIR', os.path.join('csv_files', 'testdata'))
|
||||
from code.web.app import app
|
||||
client = TestClient(app)
|
||||
|
||||
# Build + reroll to generate metrics and seed history
|
||||
r1 = client.post('/api/random_full_build', json={'seed': 9090})
|
||||
assert r1.status_code == 200, r1.text
|
||||
r2 = client.post('/api/random_reroll', json={'seed': 9090})
|
||||
assert r2.status_code == 200, r2.text
|
||||
|
||||
# Metrics
|
||||
m = client.get('/status/random_metrics')
|
||||
assert m.status_code == 200, m.text
|
||||
mj = m.json()
|
||||
assert mj.get('ok') is True
|
||||
metrics = mj.get('metrics') or {}
|
||||
assert 'full_build' in metrics and 'reroll' in metrics
|
||||
|
||||
# Seed history
|
||||
sh = client.get('/api/random/seeds')
|
||||
assert sh.status_code == 200
|
||||
sj = sh.json()
|
||||
seeds = sj.get('seeds') or []
|
||||
assert any(s == 9090 for s in seeds) and sj.get('last') in seeds
|
||||
63
code/tests/test_random_performance_p95.py
Normal file

@@ -0,0 +1,63 @@
from __future__ import annotations

import os
from typing import List
from fastapi.testclient import TestClient

"""Lightweight performance smoke test for Random Modes.

Runs a small number of builds (SURPRISE_COUNT + THEMED_COUNT) using the frozen
CSV test dataset and asserts that the p95 elapsed_ms is under the configured
threshold (default 1000ms) unless PERF_SKIP=1 is set.

This is intentionally lenient and should not be treated as a microbenchmark; it
serves as a regression guard against accidental O(N^2)-style slowdowns.
"""

SURPRISE_COUNT = int(os.getenv("PERF_SURPRISE_COUNT", "15"))
THEMED_COUNT = int(os.getenv("PERF_THEMED_COUNT", "15"))
THRESHOLD_MS = int(os.getenv("PERF_P95_THRESHOLD_MS", "1000"))
SKIP = os.getenv("PERF_SKIP") == "1"
THEME = os.getenv("PERF_SAMPLE_THEME", "Tokens")


def _elapsed(diag: dict) -> int:
    try:
        return int(diag.get("elapsed_ms") or 0)
    except Exception:
        return 0


def test_random_performance_p95(monkeypatch):  # pragma: no cover - performance heuristic
    if SKIP:
        return  # allow opt-out in CI or constrained environments

    monkeypatch.setenv("RANDOM_MODES", "1")
    monkeypatch.setenv("CSV_FILES_DIR", os.path.join("csv_files", "testdata"))
    from code.web.app import app
    client = TestClient(app)

    samples: List[int] = []

    # Surprise (no theme)
    for i in range(SURPRISE_COUNT):
        r = client.post("/api/random_full_build", json={"seed": 10000 + i})
        assert r.status_code == 200, r.text
        samples.append(_elapsed(r.json().get("diagnostics") or {}))

    # Themed
    for i in range(THEMED_COUNT):
        r = client.post("/api/random_full_build", json={"seed": 20000 + i, "theme": THEME})
        assert r.status_code == 200, r.text
        samples.append(_elapsed(r.json().get("diagnostics") or {}))

    # Basic sanity: individual builds may report 0ms, but not every sample should be zero
    assert len(samples) == SURPRISE_COUNT + THEMED_COUNT
    if all(s == 0 for s in samples):  # degenerate path
        return

    # p95
    sorted_samples = sorted(samples)
    idx = max(0, int(round(0.95 * (len(sorted_samples) - 1))))
    p95 = sorted_samples[idx]
    assert p95 < THRESHOLD_MS, f"p95 {p95}ms exceeds threshold {THRESHOLD_MS}ms (samples={samples})"
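The index arithmetic above is a nearest-rank percentile over the sorted samples. A minimal sketch of the same computation as a reusable helper (the function name is hypothetical, not part of the suite):

from typing import Sequence

def p95_nearest_rank(samples: Sequence[int]) -> int:
    # Nearest-rank p95: index round(0.95 * (n - 1)) into the sorted list
    if not samples:
        return 0
    ordered = sorted(samples)
    idx = max(0, int(round(0.95 * (len(ordered) - 1))))
    return ordered[idx]

# With 30 samples (as in the default 15 + 15 run) the index is round(0.95 * 29) = 28,
# i.e. the second-largest elapsed time is what gets compared against THRESHOLD_MS.
assert p95_nearest_rank(list(range(30))) == 28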
57
code/tests/test_random_permalink_reproduction.py
Normal file

@@ -0,0 +1,57 @@
import os
import base64
import json

import pytest
from fastapi.testclient import TestClient


@pytest.fixture(scope="module")
def client():
    # Ensure flags and frozen dataset
    os.environ["RANDOM_MODES"] = "1"
    os.environ["RANDOM_UI"] = "1"
    os.environ["CSV_FILES_DIR"] = os.path.join("csv_files", "testdata")

    from web.app import app

    with TestClient(app) as c:
        yield c


def _decode_state_token(token: str) -> dict:
    pad = "=" * (-len(token) % 4)
    raw = base64.urlsafe_b64decode((token + pad).encode("ascii")).decode("utf-8")
    return json.loads(raw)


def test_permalink_reproduces_random_full_build(client: TestClient):
    # Build once with a fixed seed
    seed = 1111
    r1 = client.post("/api/random_full_build", json={"seed": seed})
    assert r1.status_code == 200, r1.text
    data1 = r1.json()
    assert data1.get("seed") == seed
    assert data1.get("permalink")
    deck1 = data1.get("decklist")

    # Extract and decode permalink token
    permalink: str = data1["permalink"]
    assert permalink.startswith("/build/from?state=")
    token = permalink.split("state=", 1)[1]
    decoded = _decode_state_token(token)
    # Validate token contains the random payload
    rnd = decoded.get("random") or {}
    assert rnd.get("seed") == seed
    # Rebuild using only the fields contained in the permalink random payload
    r2 = client.post("/api/random_full_build", json={
        "seed": rnd.get("seed"),
        "theme": rnd.get("theme"),
        "constraints": rnd.get("constraints"),
    })
    assert r2.status_code == 200, r2.text
    data2 = r2.json()
    deck2 = data2.get("decklist")

    # Reproduction should be identical
    assert deck2 == deck1
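The `_decode_state_token` helper restores the '=' padding that the server strips when minting the token. A small round-trip sketch of that encode/decode pair (standalone; the payload is illustrative):

import base64
import json

def encode_state(payload: dict) -> str:
    raw = json.dumps(payload, separators=(",", ":")).encode("utf-8")
    return base64.urlsafe_b64encode(raw).decode("ascii").rstrip("=")

def decode_state(token: str) -> dict:
    pad = "=" * (-len(token) % 4)  # re-add the stripped padding
    return json.loads(base64.urlsafe_b64decode((token + pad).encode("ascii")).decode("utf-8"))

state = {"random": {"seed": 1111, "theme": None, "constraints": {}}}
assert decode_state(encode_state(state)) == state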
54
code/tests/test_random_permalink_roundtrip.py
Normal file

@@ -0,0 +1,54 @@
import os
import base64
import json

import pytest
from fastapi.testclient import TestClient


@pytest.fixture(scope="module")
def client():
    # Ensure flags and frozen dataset
    os.environ["RANDOM_MODES"] = "1"
    os.environ["RANDOM_UI"] = "1"
    os.environ["CSV_FILES_DIR"] = os.path.join("csv_files", "testdata")

    from web.app import app

    with TestClient(app) as c:
        yield c


def _decode_state_token(token: str) -> dict:
    pad = "=" * (-len(token) % 4)
    raw = base64.urlsafe_b64decode((token + pad).encode("ascii")).decode("utf-8")
    return json.loads(raw)


def test_permalink_roundtrip_via_build_routes(client: TestClient):
    # Create a permalink via random full build
    r1 = client.post("/api/random_full_build", json={"seed": 777})
    assert r1.status_code == 200, r1.text
    p1 = r1.json().get("permalink")
    assert p1 and p1.startswith("/build/from?state=")
    token = p1.split("state=", 1)[1]
    state1 = _decode_state_token(token)
    rnd1 = state1.get("random") or {}

    # Visit the permalink (server should rehydrate session from token)
    r_page = client.get(p1)
    assert r_page.status_code == 200

    # Ask server to produce a permalink from current session
    r2 = client.get("/build/permalink")
    assert r2.status_code == 200, r2.text
    body2 = r2.json()
    assert body2.get("ok") is True
    p2 = body2.get("permalink")
    assert p2 and p2.startswith("/build/from?state=")
    token2 = p2.split("state=", 1)[1]
    state2 = _decode_state_token(token2)
    rnd2 = state2.get("random") or {}

    # The random payload should survive the roundtrip unchanged
    assert rnd2 == rnd1
82
code/tests/test_random_rate_limit_headers.py
Normal file

@@ -0,0 +1,82 @@
import os
import time
from typing import Optional

import pytest
from fastapi.testclient import TestClient
import sys


def _client_with_flags(window_s: int = 2, limit_random: int = 2, limit_build: int = 2, limit_suggest: int = 2) -> TestClient:
    # Ensure flags are set prior to importing app
    os.environ['RANDOM_MODES'] = '1'
    os.environ['RANDOM_UI'] = '1'
    os.environ['RANDOM_RATE_LIMIT'] = '1'
    os.environ['RATE_LIMIT_WINDOW_S'] = str(window_s)
    os.environ['RANDOM_RATE_LIMIT_RANDOM'] = str(limit_random)
    os.environ['RANDOM_RATE_LIMIT_BUILD'] = str(limit_build)
    os.environ['RANDOM_RATE_LIMIT_SUGGEST'] = str(limit_suggest)

    # Force fresh import so RATE_LIMIT_* constants reflect env
    sys.modules.pop('code.web.app', None)
    from code.web import app as app_module  # type: ignore
    # Force override constants for deterministic test
    try:
        app_module.RATE_LIMIT_ENABLED = True  # type: ignore[attr-defined]
        app_module.RATE_LIMIT_WINDOW_S = window_s  # type: ignore[attr-defined]
        app_module.RATE_LIMIT_RANDOM = limit_random  # type: ignore[attr-defined]
        app_module.RATE_LIMIT_BUILD = limit_build  # type: ignore[attr-defined]
        app_module.RATE_LIMIT_SUGGEST = limit_suggest  # type: ignore[attr-defined]
        # Reset in-memory counters
        if hasattr(app_module, '_RL_COUNTS'):
            app_module._RL_COUNTS.clear()  # type: ignore[attr-defined]
    except Exception:
        pass
    return TestClient(app_module.app)


@pytest.mark.parametrize("path, method, payload, header_check", [
    ("/api/random_reroll", "post", {"seed": 1}, True),
    ("/themes/api/suggest?q=to", "get", None, True),
])
def test_rate_limit_emits_headers_and_429(path: str, method: str, payload: Optional[dict], header_check: bool):
    client = _client_with_flags(window_s=5, limit_random=1, limit_suggest=1)

    # First call should be OK or at least emit rate-limit headers
    if method == 'post':
        r1 = client.post(path, json=payload)
    else:
        r1 = client.get(path)
    assert 'X-RateLimit-Reset' in r1.headers
    assert 'X-RateLimit-Remaining' in r1.headers or r1.status_code == 429

    # Drive additional requests to exceed the remaining budget deterministically
    rem = None
    try:
        if 'X-RateLimit-Remaining' in r1.headers:
            rem = int(r1.headers['X-RateLimit-Remaining'])
    except Exception:
        rem = None

    attempts = (rem + 1) if isinstance(rem, int) else 5
    rN = r1
    for _ in range(attempts):
        if method == 'post':
            rN = client.post(path, json=payload)
        else:
            rN = client.get(path)
        if rN.status_code == 429:
            break

    assert rN.status_code == 429
    assert 'Retry-After' in rN.headers

    # Wait for window to pass, then call again and expect success
    time.sleep(5.2)
    if method == 'post':
        r3 = client.post(path, json=payload)
    else:
        r3 = client.get(path)

    assert r3.status_code != 429
    assert 'X-RateLimit-Remaining' in r3.headers
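The header expectations above line up with the fixed-window, per-IP/per-group counter that `rate_limit_check` in code/web/app.py implements further down in this diff. A condensed sketch of that accounting (simplified; the real code raises an HTTPException(429) carrying the Retry-After header):

import time

_counts: dict[tuple[str, str, int], int] = {}

def check(ip: str, group: str, limit: int, window_s: int) -> tuple[int, int]:
    # One bucket per (ip, group, window); the window id is wall-clock seconds // window_s
    now = int(time.time())
    window_id = now // window_s
    reset_epoch = (window_id + 1) * window_s  # surfaces as X-RateLimit-Reset
    key = (ip, group, window_id)
    _counts[key] = _counts.get(key, 0) + 1
    remaining = max(0, limit - _counts[key])  # surfaces as X-RateLimit-Remaining
    if _counts[key] > limit:
        raise RuntimeError(f"429 rate_limited, Retry-After={reset_epoch - now}")
    return remaining, reset_epoch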
25
code/tests/test_random_reroll_diagnostics_parity.py
Normal file

@@ -0,0 +1,25 @@
from __future__ import annotations
import importlib
import os
from starlette.testclient import TestClient


def _client(monkeypatch):
    monkeypatch.setenv('RANDOM_MODES', '1')
    monkeypatch.setenv('CSV_FILES_DIR', os.path.join('csv_files', 'testdata'))
    app_module = importlib.import_module('code.web.app')
    return TestClient(app_module.app)


def test_reroll_diagnostics_match_full_build(monkeypatch):
    client = _client(monkeypatch)
    base = client.post('/api/random_full_build', json={'seed': 321})
    assert base.status_code == 200
    seed = base.json()['seed']
    reroll = client.post('/api/random_reroll', json={'seed': seed})
    assert reroll.status_code == 200
    d_base = base.json().get('diagnostics') or {}
    d_reroll = reroll.json().get('diagnostics') or {}
    # elapsed_ms may differ between the two, but both diagnostics must carry the attempts/timeout flags
    for k in ['attempts', 'timeout_hit', 'retries_exhausted']:
        assert k in d_base and k in d_reroll
43
code/tests/test_random_reroll_idempotency.py
Normal file

@@ -0,0 +1,43 @@
import os

import pytest
from fastapi.testclient import TestClient


@pytest.fixture(scope="module")
def client():
    # Ensure flags and frozen dataset
    os.environ["RANDOM_MODES"] = "1"
    os.environ["RANDOM_UI"] = "1"
    os.environ["CSV_FILES_DIR"] = os.path.join("csv_files", "testdata")

    from web.app import app

    with TestClient(app) as c:
        yield c


def test_reroll_idempotency_and_progression(client: TestClient):
    # Initial build
    base_seed = 2024
    r1 = client.post("/api/random_full_build", json={"seed": base_seed})
    assert r1.status_code == 200, r1.text
    d1 = r1.json()
    deck1 = d1.get("decklist")
    assert isinstance(deck1, list) and deck1

    # Rebuilding with the same seed should produce an identical result
    r_same = client.post("/api/random_full_build", json={"seed": base_seed})
    assert r_same.status_code == 200, r_same.text
    deck_same = r_same.json().get("decklist")
    assert deck_same == deck1

    # Reroll (seed+1) should typically change the result
    r2 = client.post("/api/random_reroll", json={"seed": base_seed})
    assert r2.status_code == 200, r2.text
    d2 = r2.json()
    assert d2.get("seed") == base_seed + 1
    deck2 = d2.get("decklist")

    # A small dataset could coincidentally produce the same deck, but in practice either the deck or the commander should differ
    assert deck2 != deck1 or d2.get("commander") != d1.get("commander")
45
code/tests/test_random_reroll_locked_artifacts.py
Normal file

@@ -0,0 +1,45 @@
import os
import time
from glob import glob
from fastapi.testclient import TestClient


def _client():
    os.environ['RANDOM_UI'] = '1'
    os.environ['RANDOM_MODES'] = '1'
    os.environ['CSV_FILES_DIR'] = os.path.join('csv_files', 'testdata')
    from web.app import app
    return TestClient(app)


def _recent_files(pattern: str, since: float):
    out = []
    for p in glob(pattern):
        try:
            if os.path.getmtime(p) >= since:
                out.append(p)
        except Exception:
            pass
    return out


def test_locked_reroll_generates_summary_and_compliance():
    c = _client()
    # First random build (api) to establish commander/seed
    r = c.post('/api/random_reroll', json={})
    assert r.status_code == 200, r.text
    data = r.json()
    commander = data['commander']
    seed = data['seed']

    start = time.time()
    # Locked reroll via HTMX path (form style)
    form_body = f"seed={seed}&commander={commander}&mode=reroll_same_commander"
    r2 = c.post('/hx/random_reroll', data=form_body, headers={'Content-Type': 'application/x-www-form-urlencoded'})
    assert r2.status_code == 200, r2.text

    # Look for new sidecar/compliance created after start
    recent_summary = _recent_files('deck_files/*_*.summary.json', start)
    recent_compliance = _recent_files('deck_files/*_compliance.json', start)
    assert recent_summary, 'Expected at least one new summary json after locked reroll'
    assert recent_compliance, 'Expected at least one new compliance json after locked reroll'
36
code/tests/test_random_reroll_locked_commander.py
Normal file

@@ -0,0 +1,36 @@
import json
import os
from fastapi.testclient import TestClient


def _new_client():
    os.environ['RANDOM_MODES'] = '1'
    os.environ['RANDOM_UI'] = '1'
    os.environ['CSV_FILES_DIR'] = os.path.join('csv_files', 'testdata')
    from web.app import app
    return TestClient(app)


def test_reroll_keeps_commander():
    client = _new_client()
    # Initial random build (api path) to get commander + seed
    r1 = client.post('/api/random_reroll', json={})
    assert r1.status_code == 200
    data1 = r1.json()
    commander = data1['commander']
    seed = data1['seed']

    # First reroll with commander lock
    headers = {'Content-Type': 'application/json'}
    body = json.dumps({'seed': seed, 'commander': commander, 'mode': 'reroll_same_commander'})
    r2 = client.post('/hx/random_reroll', data=body, headers=headers)
    assert r2.status_code == 200
    html1 = r2.text
    assert commander in html1

    # Second reroll should keep the same commander (seeds increment, so the server sees prior seed + 1)
    body2 = json.dumps({'seed': seed + 1, 'commander': commander, 'mode': 'reroll_same_commander'})
    r3 = client.post('/hx/random_reroll', data=body2, headers=headers)
    assert r3.status_code == 200
    html2 = r3.text
    assert commander in html2
31
code/tests/test_random_reroll_locked_commander_form.py
Normal file

@@ -0,0 +1,31 @@
from fastapi.testclient import TestClient
from urllib.parse import quote_plus
import os


def _new_client():
    os.environ['RANDOM_MODES'] = '1'
    os.environ['RANDOM_UI'] = '1'
    os.environ['CSV_FILES_DIR'] = os.path.join('csv_files', 'testdata')
    from web.app import app
    return TestClient(app)


def test_reroll_keeps_commander_form_encoded():
    client = _new_client()
    r1 = client.post('/api/random_reroll', json={})
    assert r1.status_code == 200
    data1 = r1.json()
    commander = data1['commander']
    seed = data1['seed']

    form_body = f"seed={seed}&commander={quote_plus(commander)}&mode=reroll_same_commander"
    r2 = client.post('/hx/random_reroll', data=form_body, headers={'Content-Type': 'application/x-www-form-urlencoded'})
    assert r2.status_code == 200
    assert commander in r2.text

    # Second reroll with incremented seed
    form_body2 = f"seed={seed+1}&commander={quote_plus(commander)}&mode=reroll_same_commander"
    r3 = client.post('/hx/random_reroll', data=form_body2, headers={'Content-Type': 'application/x-www-form-urlencoded'})
    assert r3.status_code == 200
    assert commander in r3.text
27
code/tests/test_random_reroll_locked_no_duplicate_exports.py
Normal file

@@ -0,0 +1,27 @@
import os
import glob
from fastapi.testclient import TestClient

def _client():
    os.environ['RANDOM_UI'] = '1'
    os.environ['RANDOM_MODES'] = '1'
    os.environ['CSV_FILES_DIR'] = os.path.join('csv_files', 'testdata')
    from web.app import app
    return TestClient(app)


def test_locked_reroll_single_export():
    c = _client()
    # Initial surprise build
    r = c.post('/api/random_reroll', json={})
    assert r.status_code == 200
    seed = r.json()['seed']
    commander = r.json()['commander']
    before_csvs = set(glob.glob('deck_files/*.csv'))
    form_body = f"seed={seed}&commander={commander}&mode=reroll_same_commander"
    r2 = c.post('/hx/random_reroll', data=form_body, headers={'Content-Type': 'application/x-www-form-urlencoded'})
    assert r2.status_code == 200
    after_csvs = set(glob.glob('deck_files/*.csv'))
    new_csvs = after_csvs - before_csvs
    # Expect exactly 1 new csv file for the reroll (not two)
    assert len(new_csvs) == 1, f"Expected 1 new csv, got {len(new_csvs)}: {new_csvs}"
42
code/tests/test_random_seed_persistence.py
Normal file

@@ -0,0 +1,42 @@
import os

import pytest
from fastapi.testclient import TestClient


@pytest.fixture(scope="module")
def client():
    os.environ["RANDOM_MODES"] = "1"
    os.environ["RANDOM_UI"] = "1"
    os.environ["CSV_FILES_DIR"] = os.path.join("csv_files", "testdata")
    from web.app import app
    with TestClient(app) as c:
        yield c


def test_recent_seeds_flow(client: TestClient):
    # Initially empty
    r0 = client.get("/api/random/seeds")
    assert r0.status_code == 200, r0.text
    data0 = r0.json()
    assert data0.get("seeds") == [] or data0.get("seeds") is not None

    # Run a full build with a specific seed
    r1 = client.post("/api/random_full_build", json={"seed": 1001})
    assert r1.status_code == 200, r1.text
    d1 = r1.json()
    assert d1.get("seed") == 1001

    # Reroll (should increment to 1002) and be stored
    r2 = client.post("/api/random_reroll", json={"seed": 1001})
    assert r2.status_code == 200, r2.text
    d2 = r2.json()
    assert d2.get("seed") == 1002

    # Fetch recent seeds; expect to include both 1001 and 1002, with last==1002
    r3 = client.get("/api/random/seeds")
    assert r3.status_code == 200, r3.text
    d3 = r3.json()
    seeds = d3.get("seeds") or []
    assert 1001 in seeds and 1002 in seeds
    assert d3.get("last") == 1002
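The `last` and membership checks above follow from the session bookkeeping added to code/web/app.py later in this diff: each build appends its seed and the list is pruned to the last 10 unique values, with the most recent occurrence winning. A condensed sketch of that pruning step:

def recent_unique(seeds: list[int], cap: int = 10) -> list[int]:
    # Walk from the most recent seed backwards, keeping the first (i.e. latest) occurrence of each value
    seen: set[int] = set()
    newest_first: list[int] = []
    for s in reversed(seeds):
        if s in seen:
            continue
        seen.add(s)
        newest_first.append(s)
    return list(reversed(newest_first))[-cap:]

# 1001 then 1002: both survive, and the reroll seed is the most recent entry
assert recent_unique([1001, 1002]) == [1001, 1002]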
22
code/tests/test_random_ui_page.py
Normal file

@@ -0,0 +1,22 @@
import os

import pytest
from fastapi.testclient import TestClient


@pytest.fixture(scope="module")
def client():
    os.environ["RANDOM_MODES"] = "1"
    os.environ["RANDOM_UI"] = "1"
    os.environ["CSV_FILES_DIR"] = os.path.join("csv_files", "testdata")

    from web.app import app

    with TestClient(app) as c:
        yield c


def test_random_modes_page_renders(client: TestClient):
    r = client.get("/random")
    assert r.status_code == 200
    assert "Random Modes" in r.text
43
code/tests/test_theme_catalog_mapping_and_samples.py
Normal file

@@ -0,0 +1,43 @@
from __future__ import annotations
import json
import os
import importlib
from pathlib import Path
from starlette.testclient import TestClient
from code.type_definitions_theme_catalog import ThemeCatalog  # type: ignore

CATALOG_PATH = Path('config/themes/theme_list.json')


def _load_catalog():
    raw = json.loads(CATALOG_PATH.read_text(encoding='utf-8'))
    return ThemeCatalog(**raw)


def test_catalog_schema_parses_and_has_minimum_themes():
    cat = _load_catalog()
    assert len(cat.themes) >= 5  # sanity floor
    # Validate each theme has a canonical name and that its synergies field is a list
    for t in cat.themes:
        assert isinstance(t.theme, str) and t.theme
        assert isinstance(t.synergies, list)


def test_sample_seeds_produce_non_empty_decks(monkeypatch):
    # Use test data to keep runs fast/deterministic
    monkeypatch.setenv('RANDOM_MODES', '1')
    monkeypatch.setenv('CSV_FILES_DIR', os.path.join('csv_files', 'testdata'))
    app_module = importlib.import_module('code.web.app')
    client = TestClient(app_module.app)
    cat = _load_catalog()
    # Choose up to 5 themes (deterministic ordering/selection) for the smoke check
    themes = sorted([t.theme for t in cat.themes])[:5]
    for th in themes:
        r = client.post('/api/random_full_build', json={'theme': th, 'seed': 999})
        assert r.status_code == 200
        data = r.json()
        # A decklist should exist (it may be empty if the headless builder is unavailable, so stay lenient)
        assert 'seed' in data
        assert data.get('theme') == th
        assert isinstance(data.get('commander'), str)
16
code/tests/test_theme_catalog_schema_validation.py
Normal file

@@ -0,0 +1,16 @@
from pathlib import Path
import json


def test_theme_list_json_validates_against_pydantic_and_fast_path():
    # Load JSON
    p = Path('config/themes/theme_list.json')
    raw = json.loads(p.read_text(encoding='utf-8'))

    # Pydantic validation
    from code.type_definitions_theme_catalog import ThemeCatalog  # type: ignore
    catalog = ThemeCatalog(**raw)
    assert isinstance(catalog.themes, list) and len(catalog.themes) > 0
    # Basic fields exist on entries
    first = catalog.themes[0]
    assert first.theme and isinstance(first.synergies, list)
35
code/tests/test_theme_input_validation.py
Normal file

@@ -0,0 +1,35 @@
from __future__ import annotations
import importlib
import os
from starlette.testclient import TestClient

def _client(monkeypatch):
    monkeypatch.setenv('RANDOM_MODES', '1')
    monkeypatch.setenv('CSV_FILES_DIR', os.path.join('csv_files', 'testdata'))
    app_module = importlib.import_module('code.web.app')
    return TestClient(app_module.app)


def test_theme_rejects_disallowed_chars(monkeypatch):
    client = _client(monkeypatch)
    bad = {"seed": 10, "theme": "Bad;DROP TABLE"}
    r = client.post('/api/random_full_build', json=bad)
    assert r.status_code == 200
    data = r.json()
    # Theme should be None or absent because it was rejected
    assert data.get('theme') in (None, '')


def test_theme_rejects_long(monkeypatch):
    client = _client(monkeypatch)
    long_theme = 'X' * 200
    r = client.post('/api/random_full_build', json={"seed": 11, "theme": long_theme})
    assert r.status_code == 200
    assert r.json().get('theme') in (None, '')


def test_theme_accepts_normal(monkeypatch):
    client = _client(monkeypatch)
    r = client.post('/api/random_full_build', json={"seed": 12, "theme": "Tokens"})
    assert r.status_code == 200
    assert r.json().get('theme') == 'Tokens'
653
code/web/app.py
@@ -12,10 +12,11 @@ import uuid
import logging
from starlette.exceptions import HTTPException as StarletteHTTPException
from starlette.middleware.gzip import GZipMiddleware
from typing import Any
from typing import Any, Optional, Dict
from contextlib import asynccontextmanager
from .services.combo_utils import detect_all as _detect_all
from .services.theme_catalog_loader import prewarm_common_filters  # type: ignore
from .services.tasks import get_session, new_sid, set_session_value  # type: ignore

# Resolve template/static dirs relative to this file
_THIS_DIR = Path(__file__).resolve().parent

@@ -116,6 +117,41 @@ def _as_int(val: str | None, default: int) -> int:
        return default
RANDOM_MAX_ATTEMPTS = _as_int(os.getenv("RANDOM_MAX_ATTEMPTS"), 5)
RANDOM_TIMEOUT_MS = _as_int(os.getenv("RANDOM_TIMEOUT_MS"), 5000)
RANDOM_TELEMETRY = _as_bool(os.getenv("RANDOM_TELEMETRY"), False)
RATE_LIMIT_ENABLED = _as_bool(os.getenv("RANDOM_RATE_LIMIT"), False)
RATE_LIMIT_WINDOW_S = _as_int(os.getenv("RATE_LIMIT_WINDOW_S"), 10)
RATE_LIMIT_RANDOM = _as_int(os.getenv("RANDOM_RATE_LIMIT_RANDOM"), 10)
RATE_LIMIT_BUILD = _as_int(os.getenv("RANDOM_RATE_LIMIT_BUILD"), 10)
RATE_LIMIT_SUGGEST = _as_int(os.getenv("RANDOM_RATE_LIMIT_SUGGEST"), 30)
RANDOM_STRUCTURED_LOGS = _as_bool(os.getenv("RANDOM_STRUCTURED_LOGS"), False)

# Simple theme input validation constraints
_THEME_MAX_LEN = 60
_THEME_ALLOWED_CHARS = set("abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789 -'_")

def _sanitize_theme(raw: Optional[str]) -> Optional[str]:
    """Return a sanitized theme string or None if invalid.

    Rules (minimal by design):
    - Strip leading/trailing whitespace
    - Reject if empty after strip
    - Reject if length > _THEME_MAX_LEN
    - Reject if any disallowed character present
    """
    if raw is None:
        return None
    try:
        s = str(raw).strip()
    except Exception:
        return None
    if not s:
        return None
    if len(s) > _THEME_MAX_LEN:
        return None
    for ch in s:
        if ch not in _THEME_ALLOWED_CHARS:
            return None
    return s

# Theme default from environment: THEME=light|dark|system (case-insensitive). Defaults to system.
_THEME_ENV = (os.getenv("THEME") or "").strip().lower()

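A few illustrative calls showing how `_sanitize_theme` behaves under these rules (inputs mirror the theme input validation tests added above):

assert _sanitize_theme("  Tokens  ") == "Tokens"        # trimmed and accepted
assert _sanitize_theme("Bad;DROP TABLE") is None        # ';' is not in the allow-list
assert _sanitize_theme("X" * 200) is None               # longer than _THEME_MAX_LEN (60)
assert _sanitize_theme(None) is None
assert _sanitize_theme("   ") is None                   # empty after strip
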
@ -157,6 +193,102 @@ def _load_catalog_hash() -> str:
|
|||
|
||||
templates.env.globals["catalog_hash"] = _load_catalog_hash()
|
||||
|
||||
# --- Optional in-memory telemetry for Random Modes ---
|
||||
_RANDOM_METRICS: dict[str, dict[str, int]] = {
|
||||
"build": {"success": 0, "constraints_impossible": 0, "error": 0},
|
||||
"full_build": {"success": 0, "fallback": 0, "constraints_impossible": 0, "error": 0},
|
||||
"reroll": {"success": 0, "fallback": 0, "constraints_impossible": 0, "error": 0},
|
||||
}
|
||||
|
||||
def _record_random_event(kind: str, *, success: bool = False, fallback: bool = False, constraints_impossible: bool = False, error: bool = False) -> None:
|
||||
if not RANDOM_TELEMETRY:
|
||||
return
|
||||
try:
|
||||
k = _RANDOM_METRICS.get(kind)
|
||||
if not k:
|
||||
return
|
||||
if success:
|
||||
k["success"] = int(k.get("success", 0)) + 1
|
||||
if fallback:
|
||||
k["fallback"] = int(k.get("fallback", 0)) + 1
|
||||
if constraints_impossible:
|
||||
k["constraints_impossible"] = int(k.get("constraints_impossible", 0)) + 1
|
||||
if error:
|
||||
k["error"] = int(k.get("error", 0)) + 1
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
# --- Optional structured logging for Random Modes ---
|
||||
def _log_random_event(kind: str, request: Request, status: str, **fields: Any) -> None:
|
||||
if not RANDOM_STRUCTURED_LOGS:
|
||||
return
|
||||
try:
|
||||
rid = getattr(request.state, "request_id", None)
|
||||
payload = {
|
||||
"event": "random_mode",
|
||||
"kind": kind,
|
||||
"status": status,
|
||||
"request_id": rid,
|
||||
"path": str(request.url.path),
|
||||
"ip": _client_ip(request),
|
||||
}
|
||||
for k, v in (fields or {}).items():
|
||||
# keep payload concise
|
||||
if isinstance(v, (str, int, float, bool)) or v is None:
|
||||
payload[k] = v
|
||||
logging.getLogger("web.random").info(_json.dumps(payload, separators=(",", ":")))
|
||||
except Exception:
|
||||
# Never break a request due to logging
|
||||
pass
|
||||
|
||||
# --- Optional in-memory rate limiting (best-effort, per-IP, per-group) ---
|
||||
_RL_COUNTS: dict[tuple[str, str, int], int] = {}
|
||||
|
||||
def _client_ip(request: Request) -> str:
|
||||
try:
|
||||
ip = getattr(getattr(request, "client", None), "host", None) or request.headers.get("X-Forwarded-For")
|
||||
if isinstance(ip, str) and ip.strip():
|
||||
# If XFF has multiple, use first
|
||||
return ip.split(",")[0].strip()
|
||||
except Exception:
|
||||
pass
|
||||
return "unknown"
|
||||
|
||||
def rate_limit_check(request: Request, group: str) -> tuple[int, int] | None:
|
||||
"""Check and increment rate limit for (ip, group).
|
||||
|
||||
Returns (remaining, reset_epoch) if enabled, else None.
|
||||
Raises HTTPException(429) when exceeded.
|
||||
"""
|
||||
if not RATE_LIMIT_ENABLED:
|
||||
return None
|
||||
limit = 0
|
||||
if group == "random":
|
||||
limit = int(RATE_LIMIT_RANDOM)
|
||||
elif group == "build":
|
||||
limit = int(RATE_LIMIT_BUILD)
|
||||
elif group == "suggest":
|
||||
limit = int(RATE_LIMIT_SUGGEST)
|
||||
if limit <= 0:
|
||||
return None
|
||||
win = max(1, int(RATE_LIMIT_WINDOW_S))
|
||||
now = int(time.time())
|
||||
window_id = now // win
|
||||
reset_epoch = (window_id + 1) * win
|
||||
key = (_client_ip(request), group, window_id)
|
||||
count = int(_RL_COUNTS.get(key, 0)) + 1
|
||||
_RL_COUNTS[key] = count
|
||||
remaining = max(0, limit - count)
|
||||
if count > limit:
|
||||
# Too many
|
||||
retry_after = max(0, reset_epoch - now)
|
||||
raise HTTPException(status_code=429, detail="rate_limited", headers={
|
||||
"Retry-After": str(retry_after),
|
||||
"X-RateLimit-Remaining": "0",
|
||||
"X-RateLimit-Reset": str(reset_epoch),
|
||||
})
|
||||
return (remaining, reset_epoch)
|
||||
|
||||
# --- Simple fragment cache for template partials (low-risk, TTL-based) ---
|
||||
_FRAGMENT_CACHE: dict[tuple[str, str], tuple[float, str]] = {}
|
||||
_FRAGMENT_TTL_SECONDS = 60.0
|
||||
|
|
@ -181,6 +313,61 @@ def render_cached(template_name: str, cache_key: str | None, /, **ctx: Any) -> s
|
|||
except Exception:
|
||||
return templates.get_template(template_name).render(**ctx)
|
||||
|
||||
|
||||
# --- Session helpers for Random Modes ---
|
||||
def _ensure_session(request: Request) -> tuple[str, dict[str, Any], bool]:
|
||||
"""Get or create a session for the incoming request.
|
||||
|
||||
Returns (sid, session_dict, had_existing_cookie)
|
||||
"""
|
||||
sid = request.cookies.get("sid")
|
||||
had_cookie = bool(sid)
|
||||
if not sid:
|
||||
sid = new_sid()
|
||||
sess = get_session(sid)
|
||||
return sid, sess, had_cookie
|
||||
|
||||
|
||||
def _update_random_session(request: Request, *, seed: int, theme: Any, constraints: Any) -> tuple[str, bool]:
|
||||
"""Update session with latest random build seed/theme/constraints and maintain a bounded recent list."""
|
||||
sid, sess, had_cookie = _ensure_session(request)
|
||||
rb = dict(sess.get("random_build") or {})
|
||||
rb["seed"] = int(seed)
|
||||
if theme is not None:
|
||||
rb["theme"] = theme
|
||||
if constraints is not None:
|
||||
rb["constraints"] = constraints
|
||||
recent = list(rb.get("recent_seeds") or [])
|
||||
# Append and keep last 10 unique (most-recent-first)
|
||||
recent.append(int(seed))
|
||||
# Dedupe while preserving order from the right (most recent)
|
||||
seen = set()
|
||||
dedup_rev: list[int] = []
|
||||
for s in reversed(recent):
|
||||
if s in seen:
|
||||
continue
|
||||
seen.add(s)
|
||||
dedup_rev.append(s)
|
||||
dedup = list(reversed(dedup_rev))
|
||||
rb["recent_seeds"] = dedup[-10:]
|
||||
set_session_value(sid, "random_build", rb)
|
||||
return sid, had_cookie
|
||||
|
||||
def _toggle_seed_favorite(sid: str, seed: int) -> list[int]:
|
||||
"""Toggle a seed in the favorites list and persist. Returns updated favorites."""
|
||||
sess = get_session(sid)
|
||||
rb = dict(sess.get("random_build") or {})
|
||||
favs = list(rb.get("favorite_seeds") or [])
|
||||
if seed in favs:
|
||||
favs = [s for s in favs if s != seed]
|
||||
else:
|
||||
favs.append(seed)
|
||||
# Keep stable ordering (insertion order) and cap to last 50
|
||||
favs = favs[-50:]
|
||||
rb["favorite_seeds"] = favs
|
||||
set_session_value(sid, "random_build", rb)
|
||||
return favs
|
||||
|
||||
templates.env.globals["render_cached"] = render_cached
|
||||
|
||||
# --- Diagnostics: request-id and uptime ---
|
||||
|
|
@ -241,11 +428,29 @@ async def status_sys():
|
|||
"RANDOM_UI": bool(RANDOM_UI),
|
||||
"RANDOM_MAX_ATTEMPTS": int(RANDOM_MAX_ATTEMPTS),
|
||||
"RANDOM_TIMEOUT_MS": int(RANDOM_TIMEOUT_MS),
|
||||
"RANDOM_TELEMETRY": bool(RANDOM_TELEMETRY),
|
||||
"RANDOM_STRUCTURED_LOGS": bool(RANDOM_STRUCTURED_LOGS),
|
||||
"RANDOM_RATE_LIMIT": bool(RATE_LIMIT_ENABLED),
|
||||
"RATE_LIMIT_WINDOW_S": int(RATE_LIMIT_WINDOW_S),
|
||||
"RANDOM_RATE_LIMIT_RANDOM": int(RATE_LIMIT_RANDOM),
|
||||
"RANDOM_RATE_LIMIT_BUILD": int(RATE_LIMIT_BUILD),
|
||||
"RANDOM_RATE_LIMIT_SUGGEST": int(RATE_LIMIT_SUGGEST),
|
||||
},
|
||||
}
|
||||
except Exception:
|
||||
return {"version": "unknown", "uptime_seconds": 0, "flags": {}}
|
||||
|
||||
@app.get("/status/random_metrics")
|
||||
async def status_random_metrics():
|
||||
try:
|
||||
if not RANDOM_TELEMETRY:
|
||||
return JSONResponse({"ok": False, "error": "telemetry_disabled"}, status_code=403)
|
||||
# Return a shallow copy to avoid mutation from clients
|
||||
out = {k: dict(v) for k, v in _RANDOM_METRICS.items()}
|
||||
return JSONResponse({"ok": True, "metrics": out})
|
||||
except Exception:
|
||||
return JSONResponse({"ok": False, "metrics": {}}, status_code=500)
|
||||
|
||||
def random_modes_enabled() -> bool:
|
||||
"""Dynamic check so tests that set env after import still work.
|
||||
|
||||
|
|
@ -259,6 +464,9 @@ async def api_random_build(request: Request):
|
|||
if not random_modes_enabled():
|
||||
raise HTTPException(status_code=404, detail="Random Modes disabled")
|
||||
try:
|
||||
t0 = time.time()
|
||||
# Optional rate limiting (count this request per-IP)
|
||||
rl = rate_limit_check(request, "build")
|
||||
body = {}
|
||||
try:
|
||||
body = await request.json()
|
||||
|
|
@ -267,6 +475,7 @@ async def api_random_build(request: Request):
|
|||
except Exception:
|
||||
body = {}
|
||||
theme = body.get("theme")
|
||||
theme = _sanitize_theme(theme)
|
||||
constraints = body.get("constraints")
|
||||
seed = body.get("seed")
|
||||
attempts = body.get("attempts", int(RANDOM_MAX_ATTEMPTS))
|
||||
|
|
@ -277,7 +486,7 @@ async def api_random_build(request: Request):
|
|||
except Exception:
|
||||
timeout_s = max(0.1, float(RANDOM_TIMEOUT_MS) / 1000.0)
|
||||
# Import on-demand to avoid heavy costs at module import time
|
||||
from deck_builder.random_entrypoint import build_random_deck # type: ignore
|
||||
from deck_builder.random_entrypoint import build_random_deck, RandomConstraintsImpossibleError # type: ignore
|
||||
res = build_random_deck(
|
||||
theme=theme,
|
||||
constraints=constraints,
|
||||
|
|
@ -286,7 +495,19 @@ async def api_random_build(request: Request):
|
|||
timeout_s=float(timeout_s),
|
||||
)
|
||||
rid = getattr(request.state, "request_id", None)
|
||||
return {
|
||||
_record_random_event("build", success=True)
|
||||
elapsed_ms = int(round((time.time() - t0) * 1000))
|
||||
_log_random_event(
|
||||
"build",
|
||||
request,
|
||||
"success",
|
||||
seed=int(res.seed),
|
||||
theme=(res.theme or None),
|
||||
attempts=int(attempts),
|
||||
timeout_ms=int(timeout_ms),
|
||||
elapsed_ms=elapsed_ms,
|
||||
)
|
||||
payload = {
|
||||
"seed": int(res.seed),
|
||||
"commander": res.commander,
|
||||
"theme": res.theme,
|
||||
|
|
@ -295,10 +516,25 @@ async def api_random_build(request: Request):
|
|||
"timeout_ms": int(timeout_ms),
|
||||
"request_id": rid,
|
||||
}
|
||||
resp = JSONResponse(payload)
|
||||
if rl:
|
||||
remaining, reset_epoch = rl
|
||||
try:
|
||||
resp.headers["X-RateLimit-Remaining"] = str(remaining)
|
||||
resp.headers["X-RateLimit-Reset"] = str(reset_epoch)
|
||||
except Exception:
|
||||
pass
|
||||
return resp
|
||||
except HTTPException:
|
||||
raise
|
||||
except RandomConstraintsImpossibleError as ex:
|
||||
_record_random_event("build", constraints_impossible=True)
|
||||
_log_random_event("build", request, "constraints_impossible")
|
||||
raise HTTPException(status_code=422, detail={"error": "constraints_impossible", "message": str(ex), "constraints": ex.constraints, "pool_size": ex.pool_size})
|
||||
except Exception as ex:
|
||||
logging.getLogger("web").error(f"random_build failed: {ex}")
|
||||
_record_random_event("build", error=True)
|
||||
_log_random_event("build", request, "error")
|
||||
raise HTTPException(status_code=500, detail="random_build failed")
|
||||
|
||||
|
||||
|
|
@ -308,6 +544,8 @@ async def api_random_full_build(request: Request):
|
|||
if not random_modes_enabled():
|
||||
raise HTTPException(status_code=404, detail="Random Modes disabled")
|
||||
try:
|
||||
t0 = time.time()
|
||||
rl = rate_limit_check(request, "build")
|
||||
body = {}
|
||||
try:
|
||||
body = await request.json()
|
||||
|
|
@ -316,6 +554,7 @@ async def api_random_full_build(request: Request):
|
|||
except Exception:
|
||||
body = {}
|
||||
theme = body.get("theme")
|
||||
theme = _sanitize_theme(theme)
|
||||
constraints = body.get("constraints")
|
||||
seed = body.get("seed")
|
||||
attempts = body.get("attempts", int(RANDOM_MAX_ATTEMPTS))
|
||||
|
|
@ -327,7 +566,7 @@ async def api_random_full_build(request: Request):
|
|||
timeout_s = max(0.1, float(RANDOM_TIMEOUT_MS) / 1000.0)
|
||||
|
||||
# Build a full deck deterministically
|
||||
from deck_builder.random_entrypoint import build_random_full_deck # type: ignore
|
||||
from deck_builder.random_entrypoint import build_random_full_deck, RandomConstraintsImpossibleError # type: ignore
|
||||
res = build_random_full_deck(
|
||||
theme=theme,
|
||||
constraints=constraints,
|
||||
|
|
@ -354,8 +593,23 @@ async def api_random_full_build(request: Request):
|
|||
except Exception:
|
||||
permalink = None
|
||||
|
||||
# Persist to session (so recent seeds includes initial seed)
|
||||
sid, had_cookie = _update_random_session(request, seed=int(res.seed), theme=res.theme, constraints=res.constraints or {})
|
||||
rid = getattr(request.state, "request_id", None)
|
||||
return {
|
||||
_record_random_event("full_build", success=True, fallback=bool(getattr(res, "theme_fallback", False)))
|
||||
elapsed_ms = int(round((time.time() - t0) * 1000))
|
||||
_log_random_event(
|
||||
"full_build",
|
||||
request,
|
||||
"success",
|
||||
seed=int(res.seed),
|
||||
theme=(res.theme or None),
|
||||
attempts=int(attempts),
|
||||
timeout_ms=int(timeout_ms),
|
||||
elapsed_ms=elapsed_ms,
|
||||
fallback=bool(getattr(res, "theme_fallback", False)),
|
||||
)
|
||||
resp = JSONResponse({
|
||||
"seed": int(res.seed),
|
||||
"commander": res.commander,
|
||||
"decklist": res.decklist or [],
|
||||
|
|
@ -364,21 +618,48 @@ async def api_random_full_build(request: Request):
|
|||
"permalink": permalink,
|
||||
"attempts": int(attempts),
|
||||
"timeout_ms": int(timeout_ms),
|
||||
"diagnostics": res.diagnostics or {},
|
||||
"fallback": bool(getattr(res, "theme_fallback", False)),
|
||||
"original_theme": getattr(res, "original_theme", None),
|
||||
"summary": getattr(res, "summary", None),
|
||||
"csv_path": getattr(res, "csv_path", None),
|
||||
"txt_path": getattr(res, "txt_path", None),
|
||||
"compliance": getattr(res, "compliance", None),
|
||||
"request_id": rid,
|
||||
}
|
||||
})
|
||||
if rl:
|
||||
remaining, reset_epoch = rl
|
||||
try:
|
||||
resp.headers["X-RateLimit-Remaining"] = str(remaining)
|
||||
resp.headers["X-RateLimit-Reset"] = str(reset_epoch)
|
||||
except Exception:
|
||||
pass
|
||||
if not had_cookie:
|
||||
try:
|
||||
resp.set_cookie("sid", sid, max_age=60*60*8, httponly=True, samesite="lax")
|
||||
except Exception:
|
||||
pass
|
||||
return resp
|
||||
except HTTPException:
|
||||
raise
|
||||
except RandomConstraintsImpossibleError as ex:
|
||||
_record_random_event("full_build", constraints_impossible=True)
|
||||
_log_random_event("full_build", request, "constraints_impossible")
|
||||
raise HTTPException(status_code=422, detail={"error": "constraints_impossible", "message": str(ex), "constraints": ex.constraints, "pool_size": ex.pool_size})
|
||||
except Exception as ex:
|
||||
logging.getLogger("web").error(f"random_full_build failed: {ex}")
|
||||
_record_random_event("full_build", error=True)
|
||||
_log_random_event("full_build", request, "error")
|
||||
raise HTTPException(status_code=500, detail="random_full_build failed")
|
||||
|
||||
|
||||
@app.post("/api/random_reroll")
|
||||
async def api_random_reroll(request: Request):
|
||||
# Gate behind feature flag
|
||||
if not random_modes_enabled():
|
||||
raise HTTPException(status_code=404, detail="Random Modes disabled")
|
||||
try:
|
||||
t0 = time.time()
|
||||
rl = rate_limit_check(request, "random")
|
||||
body = {}
|
||||
try:
|
||||
body = await request.json()
|
||||
|
|
@ -387,6 +668,7 @@ async def api_random_reroll(request: Request):
|
|||
except Exception:
|
||||
body = {}
|
||||
theme = body.get("theme")
|
||||
theme = _sanitize_theme(theme)
|
||||
constraints = body.get("constraints")
|
||||
last_seed = body.get("seed")
|
||||
# Simple deterministic reroll policy: increment prior seed when provided; else generate fresh
|
||||
|
|
@ -431,8 +713,24 @@ async def api_random_reroll(request: Request):
|
|||
except Exception:
|
||||
permalink = None
|
||||
|
||||
# Persist in session and set sid cookie if we just created it
|
||||
sid, had_cookie = _update_random_session(request, seed=int(res.seed), theme=res.theme, constraints=res.constraints or {})
|
||||
rid = getattr(request.state, "request_id", None)
|
||||
return {
|
||||
_record_random_event("reroll", success=True, fallback=bool(getattr(res, "theme_fallback", False)))
|
||||
elapsed_ms = int(round((time.time() - t0) * 1000))
|
||||
_log_random_event(
|
||||
"reroll",
|
||||
request,
|
||||
"success",
|
||||
seed=int(res.seed),
|
||||
theme=(res.theme or None),
|
||||
attempts=int(attempts),
|
||||
timeout_ms=int(timeout_ms),
|
||||
elapsed_ms=elapsed_ms,
|
||||
prev_seed=(int(last_seed) if isinstance(last_seed, int) or (isinstance(last_seed, str) and str(last_seed).isdigit()) else None),
|
||||
fallback=bool(getattr(res, "theme_fallback", False)),
|
||||
)
|
||||
resp = JSONResponse({
|
||||
"previous_seed": (int(last_seed) if isinstance(last_seed, int) or (isinstance(last_seed, str) and str(last_seed).isdigit()) else None),
|
||||
"seed": int(res.seed),
|
||||
"commander": res.commander,
|
||||
|
|
@ -442,12 +740,29 @@ async def api_random_reroll(request: Request):
|
|||
"permalink": permalink,
|
||||
"attempts": int(attempts),
|
||||
"timeout_ms": int(timeout_ms),
|
||||
"diagnostics": res.diagnostics or {},
|
||||
"summary": getattr(res, "summary", None),
|
||||
"request_id": rid,
|
||||
}
|
||||
})
|
||||
if rl:
|
||||
remaining, reset_epoch = rl
|
||||
try:
|
||||
resp.headers["X-RateLimit-Remaining"] = str(remaining)
|
||||
resp.headers["X-RateLimit-Reset"] = str(reset_epoch)
|
||||
except Exception:
|
||||
pass
|
||||
if not had_cookie:
|
||||
try:
|
||||
resp.set_cookie("sid", sid, max_age=60*60*8, httponly=True, samesite="lax")
|
||||
except Exception:
|
||||
pass
|
||||
return resp
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as ex:
|
||||
logging.getLogger("web").error(f"random_reroll failed: {ex}")
|
||||
_record_random_event("reroll", error=True)
|
||||
_log_random_event("reroll", request, "error")
|
||||
raise HTTPException(status_code=500, detail="random_reroll failed")
|
||||
|
||||
|
||||
|
|
@ -456,16 +771,39 @@ async def hx_random_reroll(request: Request):
|
|||
# Small HTMX endpoint returning a partial HTML fragment for in-page updates
|
||||
if not RANDOM_UI or not RANDOM_MODES:
|
||||
raise HTTPException(status_code=404, detail="Random UI disabled")
|
||||
body = {}
|
||||
rl = rate_limit_check(request, "random")
|
||||
body: Dict[str, Any] = {}
|
||||
raw_text = ""
|
||||
# Primary: attempt JSON
|
||||
try:
|
||||
body = await request.json()
|
||||
if not isinstance(body, dict):
|
||||
body = {}
|
||||
except Exception:
|
||||
body = {}
|
||||
# Fallback: form/urlencoded (htmx default) or stray query-like payload
|
||||
if not body:
|
||||
try:
|
||||
raw_bytes = await request.body()
|
||||
raw_text = raw_bytes.decode("utf-8", errors="ignore")
|
||||
from urllib.parse import parse_qs
|
||||
parsed = parse_qs(raw_text, keep_blank_values=True)
|
||||
flat: Dict[str, Any] = {}
|
||||
for k, v in parsed.items():
|
||||
if not v:
|
||||
continue
|
||||
flat[k] = v[0] if len(v) == 1 else v
|
||||
body = flat or {}
|
||||
except Exception:
|
||||
body = {}
|
||||
last_seed = body.get("seed")
|
||||
mode = body.get("mode") # "surprise" (default) vs "reroll_same_commander"
|
||||
locked_commander = body.get("commander") if mode == "reroll_same_commander" else None
|
||||
theme = body.get("theme")
|
||||
theme = _sanitize_theme(theme)
|
||||
constraints = body.get("constraints")
|
||||
attempts_override = body.get("attempts")
|
||||
timeout_ms_override = body.get("timeout_ms")
|
||||
try:
|
||||
new_seed = int(last_seed) + 1 if last_seed is not None else None
|
||||
except Exception:
|
||||
|
|
@ -473,19 +811,167 @@ async def hx_random_reroll(request: Request):
|
|||
if new_seed is None:
|
||||
from random_util import generate_seed # type: ignore
|
||||
new_seed = int(generate_seed())
|
||||
|
||||
# Import outside conditional to avoid UnboundLocalError when branch not taken
|
||||
from deck_builder.random_entrypoint import build_random_full_deck # type: ignore
|
||||
res = build_random_full_deck(
|
||||
theme=theme,
|
||||
constraints=constraints,
|
||||
seed=new_seed,
|
||||
attempts=int(RANDOM_MAX_ATTEMPTS),
|
||||
timeout_s=float(RANDOM_TIMEOUT_MS) / 1000.0,
|
||||
)
|
||||
try:
|
||||
t0 = time.time()
|
||||
_attempts = int(attempts_override) if attempts_override is not None else int(RANDOM_MAX_ATTEMPTS)
|
||||
try:
|
||||
_timeout_ms = int(timeout_ms_override) if timeout_ms_override is not None else int(RANDOM_TIMEOUT_MS)
|
||||
except Exception:
|
||||
_timeout_ms = int(RANDOM_TIMEOUT_MS)
|
||||
_timeout_s = max(0.1, float(_timeout_ms) / 1000.0)
|
||||
if locked_commander:
|
||||
build_t0 = time.time()
|
||||
from headless_runner import run as _run # type: ignore
|
||||
# Suppress builder's internal initial export to control artifact generation (matches full random path logic)
|
||||
try:
|
||||
import os as _os
|
||||
if _os.getenv('RANDOM_BUILD_SUPPRESS_INITIAL_EXPORT') is None:
|
||||
_os.environ['RANDOM_BUILD_SUPPRESS_INITIAL_EXPORT'] = '1'
|
||||
except Exception:
|
||||
pass
|
||||
builder = _run(command_name=str(locked_commander), seed=new_seed)
|
||||
elapsed_ms = int(round((time.time() - build_t0) * 1000))
|
||||
summary = None
|
||||
try:
|
||||
if hasattr(builder, 'build_deck_summary'):
|
||||
summary = builder.build_deck_summary() # type: ignore[attr-defined]
|
||||
except Exception:
|
||||
summary = None
|
||||
decklist = []
|
||||
try:
|
||||
if hasattr(builder, 'deck_list_final'):
|
||||
decklist = getattr(builder, 'deck_list_final') # type: ignore[attr-defined]
|
||||
except Exception:
|
||||
decklist = []
|
||||
# Controlled artifact export (single pass)
|
||||
csv_path = getattr(builder, 'last_csv_path', None) # type: ignore[attr-defined]
|
||||
txt_path = getattr(builder, 'last_txt_path', None) # type: ignore[attr-defined]
|
||||
compliance = None
|
||||
try:
|
||||
import os as _os
|
||||
import json as _json
|
||||
# Perform exactly one export sequence now
|
||||
if not csv_path and hasattr(builder, 'export_decklist_csv'):
|
||||
try:
|
||||
csv_path = builder.export_decklist_csv() # type: ignore[attr-defined]
|
||||
except Exception:
|
||||
csv_path = None
|
||||
if csv_path and isinstance(csv_path, str):
|
||||
base_path, _ = _os.path.splitext(csv_path)
|
||||
# Ensure txt exists (create if missing)
|
||||
if (not txt_path or not _os.path.isfile(str(txt_path))):
|
||||
try:
|
||||
base_name = _os.path.basename(base_path) + '.txt'
|
||||
if hasattr(builder, 'export_decklist_text'):
|
||||
txt_path = builder.export_decklist_text(filename=base_name) # type: ignore[attr-defined]
|
||||
except Exception:
|
||||
# Fallback: if a txt already exists from a prior build reuse it
|
||||
if _os.path.isfile(base_path + '.txt'):
|
||||
txt_path = base_path + '.txt'
|
||||
comp_path = base_path + '_compliance.json'
|
||||
if _os.path.isfile(comp_path):
|
||||
try:
|
||||
with open(comp_path, 'r', encoding='utf-8') as _cf:
|
||||
compliance = _json.load(_cf)
|
||||
except Exception:
|
||||
compliance = None
|
||||
else:
|
||||
try:
|
||||
if hasattr(builder, 'compute_and_print_compliance'):
|
||||
compliance = builder.compute_and_print_compliance(base_stem=_os.path.basename(base_path)) # type: ignore[attr-defined]
|
||||
except Exception:
|
||||
compliance = None
|
||||
if summary:
|
||||
sidecar = base_path + '.summary.json'
|
||||
if not _os.path.isfile(sidecar):
|
||||
meta = {
|
||||
"commander": getattr(builder, 'commander_name', '') or getattr(builder, 'commander', ''),
|
||||
"tags": list(getattr(builder, 'selected_tags', []) or []) or [t for t in [getattr(builder, 'primary_tag', None), getattr(builder, 'secondary_tag', None), getattr(builder, 'tertiary_tag', None)] if t],
|
||||
"bracket_level": getattr(builder, 'bracket_level', None),
|
||||
"csv": csv_path,
|
||||
"txt": txt_path,
|
||||
"random_seed": int(new_seed),
|
||||
"random_theme": theme,
|
||||
"random_constraints": constraints or {},
|
||||
"locked_commander": True,
|
||||
}
|
||||
try:
|
||||
custom_base = getattr(builder, 'custom_export_base', None)
|
||||
except Exception:
|
||||
custom_base = None
|
||||
if isinstance(custom_base, str) and custom_base.strip():
|
||||
meta["name"] = custom_base.strip()
|
||||
try:
|
||||
with open(sidecar, 'w', encoding='utf-8') as f:
|
||||
_json.dump({"meta": meta, "summary": summary}, f, ensure_ascii=False, indent=2)
|
||||
except Exception:
|
||||
pass
|
||||
except Exception:
|
||||
compliance = None
|
||||
class _Res: # minimal object with expected attrs
|
||||
pass
|
||||
res = _Res()
|
||||
res.seed = int(new_seed)
|
||||
res.commander = locked_commander
|
||||
res.theme = theme
|
||||
res.constraints = constraints or {}
|
||||
res.diagnostics = {"locked_commander": True, "attempts": 1, "elapsed_ms": elapsed_ms}
|
||||
res.summary = summary
|
||||
res.decklist = decklist
|
||||
res.csv_path = csv_path
|
||||
res.txt_path = txt_path
|
||||
res.compliance = compliance
|
||||
else:
|
||||
res = build_random_full_deck(
|
||||
theme=theme,
|
||||
constraints=constraints,
|
||||
seed=new_seed,
|
||||
attempts=int(_attempts),
|
||||
timeout_s=float(_timeout_s),
|
||||
)
|
||||
except Exception as ex:
|
||||
# Map constraints-impossible to a friendly fragment; other errors to a plain note
|
||||
msg = ""
|
||||
if ex.__class__.__name__ == "RandomConstraintsImpossibleError":
|
||||
_record_random_event("reroll", constraints_impossible=True)
|
||||
_log_random_event("reroll", request, "constraints_impossible")
|
||||
msg = "<div class=\"error\">Constraints impossible — try loosening filters.</div>"
|
||||
else:
|
||||
_record_random_event("reroll", error=True)
|
||||
_log_random_event("reroll", request, "error")
|
||||
msg = "<div class=\"error\">Reroll failed. Please try again.</div>"
|
||||
return HTMLResponse(msg, status_code=200)
|
||||
|
||||
# Persist to session
|
||||
sid, had_cookie = _update_random_session(request, seed=int(res.seed), theme=res.theme, constraints=res.constraints or {})
|
||||
|
||||
# Render minimal fragment via Jinja2
|
||||
try:
|
||||
return templates.TemplateResponse(
|
||||
elapsed_ms = int(round((time.time() - t0) * 1000))
|
||||
_log_random_event(
|
||||
"reroll",
|
||||
request,
|
||||
"success",
|
||||
seed=int(res.seed),
|
||||
theme=(res.theme or None),
|
||||
attempts=int(RANDOM_MAX_ATTEMPTS),
|
||||
timeout_ms=int(RANDOM_TIMEOUT_MS),
|
||||
elapsed_ms=elapsed_ms,
|
||||
)
|
||||
# Build permalink token for fragment copy button
|
||||
try:
|
||||
import base64 as _b64
|
||||
_raw = _json.dumps({
|
||||
"commander": res.commander,
|
||||
"random": {"seed": int(res.seed), "theme": res.theme, "constraints": res.constraints or {}},
|
||||
}, separators=(",", ":"))
|
||||
_token = _b64.urlsafe_b64encode(_raw.encode("utf-8")).decode("ascii").rstrip("=")
|
||||
_permalink = f"/build/from?state={_token}"
|
||||
except Exception:
|
||||
_permalink = None
|
||||
resp = templates.TemplateResponse(
|
||||
"partials/random_result.html", # type: ignore
|
||||
{
|
||||
"request": request,
|
||||
|
|
@ -494,20 +980,91 @@ async def hx_random_reroll(request: Request):
|
|||
"decklist": res.decklist or [],
|
||||
"theme": res.theme,
|
||||
"constraints": res.constraints or {},
|
||||
"diagnostics": res.diagnostics or {},
|
||||
"permalink": _permalink,
|
||||
"show_diagnostics": SHOW_DIAGNOSTICS,
|
||||
"fallback": bool(getattr(res, "theme_fallback", False)),
|
||||
"summary": getattr(res, "summary", None),
|
||||
},
|
||||
)
|
||||
if rl:
|
||||
remaining, reset_epoch = rl
|
||||
try:
|
||||
resp.headers["X-RateLimit-Remaining"] = str(remaining)
|
||||
resp.headers["X-RateLimit-Reset"] = str(reset_epoch)
|
||||
except Exception:
|
||||
pass
|
||||
if not had_cookie:
|
||||
try:
|
||||
resp.set_cookie("sid", sid, max_age=60*60*8, httponly=True, samesite="lax")
|
||||
except Exception:
|
||||
pass
|
||||
return resp
|
||||
except Exception as ex:
|
||||
logging.getLogger("web").error(f"hx_random_reroll template error: {ex}")
|
||||
# Fallback to JSON to avoid total failure
|
||||
return JSONResponse(
|
||||
resp = JSONResponse(
|
||||
{
|
||||
"seed": int(res.seed),
|
||||
"commander": res.commander,
|
||||
"decklist": res.decklist or [],
|
||||
"theme": res.theme,
|
||||
"constraints": res.constraints or {},
|
||||
"diagnostics": res.diagnostics or {},
|
||||
}
|
||||
)
|
||||
if not had_cookie:
|
||||
try:
|
||||
resp.set_cookie("sid", sid, max_age=60*60*8, httponly=True, samesite="lax")
|
||||
except Exception:
|
||||
pass
|
||||
return resp
|
||||
|
||||
@app.get("/api/random/seeds")
|
||||
async def api_random_recent_seeds(request: Request):
|
||||
if not random_modes_enabled():
|
||||
raise HTTPException(status_code=404, detail="Random Modes disabled")
|
||||
sid, sess, _ = _ensure_session(request)
|
||||
rb = sess.get("random_build") or {}
|
||||
seeds = list(rb.get("recent_seeds") or [])
|
||||
last = rb.get("seed")
|
||||
favorites = list(rb.get("favorite_seeds") or [])
|
||||
rid = getattr(request.state, "request_id", None)
|
||||
return {"seeds": seeds, "last": last, "favorites": favorites, "request_id": rid}
|
||||
|
||||
@app.post("/api/random/seed_favorite")
|
||||
async def api_random_seed_favorite(request: Request):
|
||||
if not random_modes_enabled():
|
||||
raise HTTPException(status_code=404, detail="Random Modes disabled")
|
||||
sid, sess, _ = _ensure_session(request)
|
||||
try:
|
||||
body = await request.json()
|
||||
if not isinstance(body, dict):
|
||||
body = {}
|
||||
except Exception:
|
||||
body = {}
|
||||
seed = body.get("seed")
|
||||
try:
|
||||
seed_int = int(seed)
|
||||
except Exception:
|
||||
raise HTTPException(status_code=400, detail="invalid seed")
|
||||
favs = _toggle_seed_favorite(sid, seed_int)
|
||||
rid = getattr(request.state, "request_id", None)
|
||||
return {"ok": True, "favorites": favs, "request_id": rid}
|
||||
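# Usage sketch (editor's addition, not part of this diff): exercising the two seed endpoints
# above. The base URL, RANDOM_MODES=1, and the use of httpx are assumptions for illustration;
# httpx may not be a project dependency.
import httpx

with httpx.Client(base_url="http://localhost:8080") as client:
    data = client.get("/api/random/seeds").json()
    print(data["seeds"], data["last"], data["favorites"])
    if data["seeds"]:
        # Toggle favorite status for the most recent seed; the session cookie is reused by the Client
        toggled = client.post("/api/random/seed_favorite", json={"seed": data["seeds"][0]})
        print(toggled.json()["favorites"])
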

@app.get("/status/random_metrics_ndjson")
async def status_random_metrics_ndjson():
if not RANDOM_TELEMETRY:
return PlainTextResponse("{}\n", media_type="application/x-ndjson")
lines = []
try:
for kind, buckets in _RANDOM_METRICS.items():
rec = {"kind": kind}
rec.update(buckets)
lines.append(_json.dumps(rec, separators=(",", ":")))
except Exception:
lines.append(_json.dumps({"error": True}))
return PlainTextResponse("\n".join(lines) + "\n", media_type="application/x-ndjson")
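# Consumption sketch (editor's addition, not part of this diff): the endpoint above emits one
# JSON object per line, each carrying a "kind" key plus whatever counter buckets _RANDOM_METRICS
# recorded. A minimal reader, assuming the app is reachable at localhost:8080:
import json
import urllib.request

with urllib.request.urlopen("http://localhost:8080/status/random_metrics_ndjson") as resp:
    for line in resp.read().decode("utf-8").splitlines():
        if line.strip():
            rec = json.loads(line)
            print(rec.get("kind"), rec)
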

# Logs tail endpoint (read-only)
@app.get("/status/logs")

@@ -620,18 +1177,35 @@ async def http_exception_handler(request: Request, exc: HTTPException):
# Friendly HTML page
template = "errors/404.html" if exc.status_code == 404 else "errors/4xx.html"
try:
return templates.TemplateResponse(template, {"request": request, "status": exc.status_code, "detail": exc.detail, "request_id": rid}, status_code=exc.status_code, headers={"X-Request-ID": rid})
headers = {"X-Request-ID": rid}
try:
if getattr(exc, "headers", None):
headers.update(exc.headers) # type: ignore[arg-type]
except Exception:
pass
return templates.TemplateResponse(template, {"request": request, "status": exc.status_code, "detail": exc.detail, "request_id": rid}, status_code=exc.status_code, headers=headers)
except Exception:
# Fallback plain text
return PlainTextResponse(f"Error {exc.status_code}: {exc.detail}\nRequest-ID: {rid}", status_code=exc.status_code, headers={"X-Request-ID": rid})
headers = {"X-Request-ID": rid}
try:
if getattr(exc, "headers", None):
headers.update(exc.headers) # type: ignore[arg-type]
except Exception:
pass
return PlainTextResponse(f"Error {exc.status_code}: {exc.detail}\nRequest-ID: {rid}", status_code=exc.status_code, headers=headers)
# JSON structure for HTMX/API
headers = {"X-Request-ID": rid}
try:
if getattr(exc, "headers", None):
headers.update(exc.headers) # type: ignore[arg-type]
except Exception:
pass
return JSONResponse(status_code=exc.status_code, content={
"error": True,
"status": exc.status_code,
"detail": exc.detail,
"request_id": rid,
"path": str(request.url.path),
}, headers={"X-Request-ID": rid})
}, headers=headers)


# Also handle Starlette's HTTPException (e.g., 404 route not found)

@@ -644,16 +1218,34 @@ async def starlette_http_exception_handler(request: Request, exc: StarletteHTTPE
if _wants_html(request):
template = "errors/404.html" if exc.status_code == 404 else "errors/4xx.html"
try:
return templates.TemplateResponse(template, {"request": request, "status": exc.status_code, "detail": exc.detail, "request_id": rid}, status_code=exc.status_code, headers={"X-Request-ID": rid})
headers = {"X-Request-ID": rid}
try:
if getattr(exc, "headers", None):
headers.update(exc.headers) # type: ignore[arg-type]
except Exception:
pass
return templates.TemplateResponse(template, {"request": request, "status": exc.status_code, "detail": exc.detail, "request_id": rid}, status_code=exc.status_code, headers=headers)
except Exception:
return PlainTextResponse(f"Error {exc.status_code}: {exc.detail}\nRequest-ID: {rid}", status_code=exc.status_code, headers={"X-Request-ID": rid})
headers = {"X-Request-ID": rid}
try:
if getattr(exc, "headers", None):
headers.update(exc.headers) # type: ignore[arg-type]
except Exception:
pass
return PlainTextResponse(f"Error {exc.status_code}: {exc.detail}\nRequest-ID: {rid}", status_code=exc.status_code, headers=headers)
headers = {"X-Request-ID": rid}
try:
if getattr(exc, "headers", None):
headers.update(exc.headers) # type: ignore[arg-type]
except Exception:
pass
return JSONResponse(status_code=exc.status_code, content={
"error": True,
"status": exc.status_code,
"detail": exc.detail,
"request_id": rid,
"path": str(request.url.path),
}, headers={"X-Request-ID": rid})
}, headers=headers)


@app.exception_handler(Exception)

@@ -675,6 +1267,13 @@ async def unhandled_exception_handler(request: Request, exc: Exception):
"path": str(request.url.path),
}, headers={"X-Request-ID": rid})

# --- Random Modes page (minimal shell) ---
@app.get("/random", response_class=HTMLResponse)
async def random_modes_page(request: Request) -> HTMLResponse:
if not random_modes_enabled():
raise HTTPException(status_code=404, detail="Random Modes disabled")
return templates.TemplateResponse("random/index.html", {"request": request, "random_ui": bool(RANDOM_UI)})

# Lightweight file download endpoint for exports
@app.get("/files")
async def get_file(path: str):

@@ -115,6 +115,82 @@ def _load_fast_theme_list() -> Optional[list[dict[str, Any]]]:
return None

@router.get("/suggest")
|
||||
@router.get("/api/suggest")
|
||||
async def theme_suggest(
|
||||
request: Request,
|
||||
q: str | None = None,
|
||||
limit: int | None = Query(10, ge=1, le=50),
|
||||
):
|
||||
"""Lightweight theme name suggestions for typeahead.
|
||||
|
||||
Prefers the precomputed fast path (theme_list.json). Falls back to full index if unavailable.
|
||||
Returns a compact JSON: {"themes": ["<name>", ...]}.
|
||||
"""
|
||||
try:
|
||||
# Optional rate limit using app helper if available
|
||||
rl_result = None
|
||||
try:
|
||||
from ..app import rate_limit_check # type: ignore
|
||||
rl_result = rate_limit_check(request, "suggest")
|
||||
except HTTPException as http_ex: # propagate 429 with headers
|
||||
raise http_ex
|
||||
except Exception:
|
||||
rl_result = None
|
||||
lim = int(limit or 10)
|
||||
names: list[str] = []
|
||||
fast = _load_fast_theme_list()
|
||||
if fast is not None:
|
||||
try:
|
||||
items = fast
|
||||
if q:
|
||||
ql = q.lower()
|
||||
items = [e for e in items if isinstance(e.get("theme"), str) and ql in e["theme"].lower()]
|
||||
for e in items[: lim * 3]: # pre-slice before unique
|
||||
nm = e.get("theme")
|
||||
if isinstance(nm, str):
|
||||
names.append(nm)
|
||||
except Exception:
|
||||
names = []
|
||||
if not names:
|
||||
# Fallback to full index
|
||||
try:
|
||||
idx = load_index()
|
||||
slugs = filter_slugs_fast(idx, q=q)
|
||||
# summaries_for_slugs returns dicts including 'theme'
|
||||
infos = summaries_for_slugs(idx, slugs[: lim * 3])
|
||||
for inf in infos:
|
||||
nm = inf.get("theme")
|
||||
if isinstance(nm, str):
|
||||
names.append(nm)
|
||||
except Exception:
|
||||
names = []
|
||||
# Deduplicate preserving order, then clamp
|
||||
seen: set[str] = set()
|
||||
out: list[str] = []
|
||||
for nm in names:
|
||||
if nm in seen:
|
||||
continue
|
||||
seen.add(nm)
|
||||
out.append(nm)
|
||||
if len(out) >= lim:
|
||||
break
|
||||
resp = JSONResponse({"themes": out})
|
||||
if rl_result:
|
||||
remaining, reset_epoch = rl_result
|
||||
try:
|
||||
resp.headers["X-RateLimit-Remaining"] = str(remaining)
|
||||
resp.headers["X-RateLimit-Reset"] = str(reset_epoch)
|
||||
except Exception:
|
||||
pass
|
||||
return resp
|
||||
except HTTPException as e:
|
||||
# Propagate FastAPI HTTPException (e.g., 429 with headers)
|
||||
raise e
|
||||
except Exception as e:
|
||||
return JSONResponse({"themes": [], "error": str(e)}, status_code=500)
|
||||
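# Usage sketch (editor's addition, not part of this diff): the suggest endpoint returns
# {"themes": ["<name>", ...]} with at most `limit` unique names. The base URL and the
# sample query below are assumptions for illustration only; the /themes prefix matches
# the path used by the Random Modes typeahead JS.
import json
import urllib.parse
import urllib.request

def fetch_theme_suggestions(base_url: str, q: str, limit: int = 10) -> list[str]:
    qs = urllib.parse.urlencode({"q": q, "limit": limit})
    with urllib.request.urlopen(f"{base_url}/themes/api/suggest?{qs}") as resp:
        return json.loads(resp.read().decode("utf-8")).get("themes", [])

# Example: fetch_theme_suggestions("http://localhost:8080", "tok") -> up to 10 matching theme names
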

def _load_tag_flag_time() -> Optional[float]:
try:
if TAG_FLAG_PATH.exists():
|
|
|
|||
|
|
@ -83,6 +83,7 @@
|
|||
<a href="/owned">Owned Library</a>
|
||||
<a href="/decks">Finished Decks</a>
|
||||
<a href="/themes/">Themes</a>
|
||||
{% if random_ui %}<a href="/random">Random</a>{% endif %}
|
||||
{% if show_diagnostics %}<a href="/diagnostics">Diagnostics</a>{% endif %}
|
||||
{% if show_logs %}<a href="/logs">Logs</a>{% endif %}
|
||||
</nav>
|
||||
|
|
@ -514,9 +515,12 @@
|
|||
el.addEventListener('mouseleave', function(){ cardPop.style.display='none'; });
|
||||
});
|
||||
}
|
||||
attachCardHover();
|
||||
bindAllCardImageRetries();
|
||||
document.addEventListener('htmx:afterSwap', function() { attachCardHover(); bindAllCardImageRetries(); });
|
||||
// Expose re-init functions globally for dynamic content
|
||||
window.attachCardHover = attachCardHover;
|
||||
window.bindAllCardImageRetries = bindAllCardImageRetries;
|
||||
attachCardHover();
|
||||
bindAllCardImageRetries();
|
||||
document.addEventListener('htmx:afterSwap', function() { attachCardHover(); bindAllCardImageRetries(); });
|
||||
})();
|
||||
</script>
|
||||
<script>
|
||||
|
|
@ -959,10 +963,20 @@
|
|||
if(!el) return null;
|
||||
// If inside flip button
|
||||
var btn = el.closest && el.closest('.dfc-toggle');
|
||||
if(btn) return btn.closest('.card-sample, .commander-cell, .card-tile, .candidate-tile, .card-preview');
|
||||
if(el.matches && el.matches('img.card-thumb')) return el.closest('.card-sample, .commander-cell, .card-tile, .candidate-tile, .card-preview');
|
||||
if(btn) return btn.closest('.card-sample, .commander-cell, .card-tile, .candidate-tile, .card-preview, .stack-card');
|
||||
// Recognized container classes (add .stack-card for finished/random deck thumbnails)
|
||||
var container = el.closest && el.closest('.card-sample, .commander-cell, .card-tile, .candidate-tile, .card-preview, .stack-card');
|
||||
if(container) return container;
|
||||
// Image-based detection (any card image carrying data-card-name)
|
||||
if(el.matches && (el.matches('img.card-thumb') || el.matches('img[data-card-name]') || el.classList.contains('commander-img'))){
|
||||
var up = el.closest && el.closest('.stack-card');
|
||||
return up || el; // fall back to the image itself
|
||||
}
|
||||
// List view spans (deck summary list mode, finished deck list, etc.)
|
||||
if(el.hasAttribute && el.hasAttribute('data-card-name')) return el;
|
||||
return null;
|
||||
}
|
||||
document.addEventListener('pointermove', function(e){ window.__lastPointerEvent = e; });
|
||||
document.addEventListener('pointerover', function(e){
|
||||
var card = getCardFromEl(e.target);
|
||||
if(!card) return;
|
||||
|
|
@ -987,6 +1001,12 @@
|
|||
var ev = window.__lastPointerEvent || { clientX: (card.getBoundingClientRect().left+12), clientY: (card.getBoundingClientRect().top+12) };
|
||||
show(card, ev);
|
||||
};
|
||||
window.hoverShowByName = function(name){
|
||||
try {
|
||||
var el = document.querySelector('[data-card-name="'+CSS.escape(name)+'"]');
|
||||
if(el){ window.__hoverShowCard(el.closest('.card-sample, .commander-cell, .card-tile, .candidate-tile, .card-preview, .stack-card') || el); }
|
||||
} catch(_) {}
|
||||
};
|
||||
// Keyboard accessibility & focus traversal (P2 UI Hover keyboard accessibility)
|
||||
document.addEventListener('focusin', function(e){ var card=e.target.closest && e.target.closest('.card-sample, .commander-cell'); if(card){ show(card, {clientX:card.getBoundingClientRect().left+10, clientY:card.getBoundingClientRect().top+10}); }});
|
||||
document.addEventListener('focusout', function(e){ var next=e.relatedTarget && e.relatedTarget.closest && e.relatedTarget.closest('.card-sample, .commander-cell'); if(!next) hide(); });
|
||||
|
|
|
|||
|
|
@ -8,6 +8,7 @@
|
|||
<a class="action-button" href="/owned">Owned Library</a>
|
||||
<a class="action-button" href="/decks">Finished Decks</a>
|
||||
<a class="action-button" href="/themes/">Browse Themes</a>
|
||||
{% if random_ui %}<a class="action-button" href="/random">Random Build</a>{% endif %}
|
||||
{% if show_logs %}<a class="action-button" href="/logs">View Logs</a>{% endif %}
|
||||
</div>
|
||||
<div id="themes-quick" style="margin-top:1rem; font-size:.85rem; color:var(--text-muted);">
|
||||
|
|
|
|||
|
|
@ -1,70 +1,15 @@
|
|||
<hr style="margin:1.25rem 0; border-color: var(--border);" />
|
||||
<h4>Deck Summary</h4>
|
||||
{% if versions and (versions.combos or versions.synergies) %}
|
||||
<div class="muted" style="font-size:12px; margin:.1rem 0 .4rem 0;">Combos/Synergies lists: v{{ versions.combos or '?' }} / v{{ versions.synergies or '?' }}</div>
|
||||
{% endif %}
|
||||
<div class="muted" style="font-size:12px; margin:.15rem 0 .4rem 0; display:flex; gap:.75rem; align-items:center; flex-wrap:wrap;">
|
||||
<span>Legend:</span>
|
||||
<span><span class="game-changer" style="font-weight:600;">Game Changer</span> <span class="muted" style="opacity:.8;">(green highlight)</span></span>
|
||||
<span><span class="owned-flag" style="margin:0 .25rem 0 .1rem;">✔</span>Owned • <span class="owned-flag" style="margin:0 .25rem 0 .1rem;">✖</span>Not owned</span>
|
||||
</div>
|
||||
|
||||
<!-- Detected Combos & Synergies (top) -->
|
||||
{% if combos or synergies %}
|
||||
<section style="margin-top:.25rem;">
|
||||
<h5>Combos & Synergies</h5>
|
||||
{% if combos %}
|
||||
<div style="margin:.25rem 0 .5rem 0;">
|
||||
<div class="muted" style="font-weight:600; margin-bottom:.25rem;">Detected Combos ({{ combos|length }})</div>
|
||||
<ul style="list-style:none; padding:0; margin:0; display:grid; grid-template-columns: repeat(auto-fill, minmax(320px, 1fr)); gap:.25rem .75rem;">
|
||||
{% for c in combos %}
|
||||
<li style="border:1px solid var(--border); border-radius:8px; padding:.35rem .5rem; background:#0f1115;" data-combo-names="{{ c.a }}||{{ c.b }}">
|
||||
<span data-card-name="{{ c.a }}">{{ c.a }}</span>
|
||||
<span class="muted"> + </span>
|
||||
<span data-card-name="{{ c.b }}">{{ c.b }}</span>
|
||||
{% if c.cheap_early or c.setup_dependent %}
|
||||
<span class="muted" style="margin-left:.4rem; font-size:12px;">
|
||||
{% if c.cheap_early %}<span title="Cheap/Early" style="border:1px solid var(--border); padding:.05rem .35rem; border-radius:999px;">cheap/early</span>{% endif %}
|
||||
{% if c.setup_dependent %}<span title="Setup Dependent" style="border:1px solid var(--border); padding:.05rem .35rem; border-radius:999px; margin-left:.25rem;">setup</span>{% endif %}
|
||||
</span>
|
||||
{% endif %}
|
||||
</li>
|
||||
{% endfor %}
|
||||
</ul>
|
||||
</div>
|
||||
{% endif %}
|
||||
{% if synergies %}
|
||||
<div style="margin:.25rem 0 .5rem 0;">
|
||||
<div class="muted" style="font-weight:600; margin-bottom:.25rem;">Detected Synergies ({{ synergies|length }})</div>
|
||||
<ul style="list-style:none; padding:0; margin:0; display:grid; grid-template-columns: repeat(auto-fill, minmax(320px, 1fr)); gap:.25rem .75rem;">
|
||||
{% for s in synergies %}
|
||||
<li style="border:1px solid var(--border); border-radius:8px; padding:.35rem .5rem; background:#0f1115;" data-combo-names="{{ s.a }}||{{ s.b }}">
|
||||
<span data-card-name="{{ s.a }}">{{ s.a }}</span>
|
||||
<span class="muted"> + </span>
|
||||
<span data-card-name="{{ s.b }}">{{ s.b }}</span>
|
||||
{% if s.tags %}
|
||||
<span class="muted" style="margin-left:.4rem; font-size:12px;">
|
||||
{% for t in s.tags %}<span style="border:1px solid var(--border); padding:.05rem .35rem; border-radius:999px; margin-right:.25rem;">{{ t }}</span>{% endfor %}
|
||||
</span>
|
||||
{% endif %}
|
||||
</li>
|
||||
{% endfor %}
|
||||
</ul>
|
||||
</div>
|
||||
{% endif %}
|
||||
</section>
|
||||
{% endif %}
|
||||
|
||||
<!-- Card Type Breakdown with names-only list and hover preview -->
|
||||
<section style="margin-top:.5rem;">
|
||||
<h5>Card Types</h5>
|
||||
<div style="margin:.5rem 0 .25rem 0; display:flex; gap:.5rem; align-items:center;">
|
||||
<span class="muted">View:</span>
|
||||
<div class="seg" role="tablist" aria-label="Type view">
|
||||
<button type="button" class="seg-btn" data-view="list" aria-selected="true">List</button>
|
||||
<button type="button" class="seg-btn" data-view="thumbs">Thumbnails</button>
|
||||
<button type="button" class="seg-btn" data-view="list" aria-selected="true" onclick="(function(btn){var list=document.getElementById('typeview-list');var thumbs=document.getElementById('typeview-thumbs');if(!list||!thumbs)return;list.classList.remove('hidden');thumbs.classList.add('hidden');btn.setAttribute('aria-selected','true');var other=btn.parentElement.querySelector('.seg-btn[data-view=thumbs]');if(other)other.setAttribute('aria-selected','false');try{localStorage.setItem('summaryTypeView','list');}catch(e){}})(this)">List</button>
|
||||
<button type="button" class="seg-btn" data-view="thumbs" onclick="(function(btn){var list=document.getElementById('typeview-list');var thumbs=document.getElementById('typeview-thumbs');if(!list||!thumbs)return;list.classList.add('hidden');thumbs.classList.remove('hidden');btn.setAttribute('aria-selected','true');var other=btn.parentElement.querySelector('.seg-btn[data-view=list]');if(other)other.setAttribute('aria-selected','false');try{localStorage.setItem('summaryTypeView','thumbs');}catch(e){}; (function(){var tv=document.getElementById('typeview-thumbs'); if(!tv) return; tv.querySelectorAll('.stack-wrap').forEach(function(sw){var grid=sw.querySelector('.stack-grid'); if(!grid) return; var cs=getComputedStyle(sw); var cardW=parseFloat(cs.getPropertyValue('--card-w'))||160; var gap=10; var width=sw.clientWidth; if(!width||width<cardW){ sw.style.setProperty('--cols','1'); return;} var cols=Math.max(1,Math.floor((width+gap)/(cardW+gap))); sw.style.setProperty('--cols',String(cols));}); })();})(this)">Thumbnails</button>
|
||||
</div>
|
||||
</div>
|
||||
<div style="display:none" hx-on:load="(function(){try{var mode=localStorage.getItem('summaryTypeView')||'list';if(mode==='thumbs'){var list=document.getElementById('typeview-list');var thumbs=document.getElementById('typeview-thumbs');if(list&&thumbs){list.classList.add('hidden');thumbs.classList.remove('hidden');var lb=document.querySelector('.seg-btn[data-view=list]');var tb=document.querySelector('.seg-btn[data-view=thumbs]');if(lb&&tb){lb.setAttribute('aria-selected','false');tb.setAttribute('aria-selected','true');}thumbs.querySelectorAll('.stack-wrap').forEach(function(sw){var grid=sw.querySelector('.stack-grid');if(!grid)return;var cs=getComputedStyle(sw);var cardW=parseFloat(cs.getPropertyValue('--card-w'))||160;var gap=10;var width=sw.clientWidth;if(!width||width<cardW){sw.style.setProperty('--cols','1');return;}var cols=Math.max(1,Math.floor((width+gap)/(cardW+gap)));sw.style.setProperty('--cols',String(cols));});}}catch(e){}})()"></div>
|
||||
{% set tb = summary.type_breakdown %}
|
||||
{% if tb and tb.counts %}
|
||||
<style>
|
||||
|
|
@ -149,58 +94,7 @@
|
|||
{% endif %}
|
||||
</section>
|
||||
|
||||
<script>
|
||||
(function(){
|
||||
var listBtn = document.querySelector('.seg-btn[data-view="list"]');
|
||||
var thumbsBtn = document.querySelector('.seg-btn[data-view="thumbs"]');
|
||||
var listView = document.getElementById('typeview-list');
|
||||
var thumbsView = document.getElementById('typeview-thumbs');
|
||||
|
||||
function recalcThumbCols() {
|
||||
if (thumbsView.classList.contains('hidden')) return;
|
||||
var wraps = thumbsView.querySelectorAll('.stack-wrap');
|
||||
wraps.forEach(function(sw){
|
||||
var grid = sw.querySelector('.stack-grid');
|
||||
if (!grid) return;
|
||||
var gridStyles = window.getComputedStyle(grid);
|
||||
var gap = parseFloat(gridStyles.columnGap) || 10;
|
||||
var swStyles = window.getComputedStyle(sw);
|
||||
var cardW = parseFloat(swStyles.getPropertyValue('--card-w')) || 160;
|
||||
var width = sw.clientWidth;
|
||||
if (!width || width < cardW) {
|
||||
sw.style.setProperty('--cols', '1');
|
||||
return;
|
||||
}
|
||||
var cols = Math.max(1, Math.floor((width + gap) / (cardW + gap)));
|
||||
sw.style.setProperty('--cols', String(cols));
|
||||
});
|
||||
}
|
||||
|
||||
function debounce(fn, ms){ var t; return function(){ clearTimeout(t); t = setTimeout(fn, ms); }; }
|
||||
var debouncedRecalc = debounce(recalcThumbCols, 100);
|
||||
window.addEventListener('resize', debouncedRecalc);
|
||||
document.addEventListener('htmx:afterSwap', debouncedRecalc);
|
||||
|
||||
function applyMode(mode){
|
||||
var isList = (mode !== 'thumbs');
|
||||
listView.classList.toggle('hidden', !isList);
|
||||
thumbsView.classList.toggle('hidden', isList);
|
||||
if (listBtn) listBtn.setAttribute('aria-selected', isList ? 'true' : 'false');
|
||||
if (thumbsBtn) thumbsBtn.setAttribute('aria-selected', isList ? 'false' : 'true');
|
||||
try { localStorage.setItem('summaryTypeView', mode); } catch(e) {}
|
||||
if (!isList) recalcThumbCols();
|
||||
}
|
||||
|
||||
if (listBtn && thumbsBtn) {
|
||||
listBtn.addEventListener('click', function(){ applyMode('list'); });
|
||||
thumbsBtn.addEventListener('click', function(){ applyMode('thumbs'); });
|
||||
}
|
||||
var initial = 'list';
|
||||
try { initial = localStorage.getItem('summaryTypeView') || 'list'; } catch(e) {}
|
||||
applyMode(initial);
|
||||
if (initial === 'thumbs') recalcThumbCols();
|
||||
})();
|
||||
</script>
|
||||
<!-- Deck Summary initializer script moved below markup for proper element availability -->
|
||||
|
||||
<!-- Mana Overview Row: Pips • Sources • Curve -->
|
||||
<section style="margin-top:1rem;">
|
||||
|
|
|
|||
|
|
@ -1,12 +1,70 @@
|
|||
<div class="random-result" hx-swap-oob="true" id="random-result">
|
||||
<div class="random-meta">
|
||||
<span class="seed">Seed: {{ seed }}</span>
|
||||
{% if theme %}<span class="theme">Theme: {{ theme }}</span>{% endif %}
|
||||
<div class="random-result" id="random-result">
|
||||
<style>
|
||||
.diag-badges{display:inline-flex; gap:4px; margin-left:8px; flex-wrap:wrap;}
|
||||
.diag-badge{background:var(--panel-alt,#334155); color:#fff; padding:2px 6px; border-radius:12px; font-size:10px; letter-spacing:.5px; line-height:1.2;}
|
||||
.diag-badge.warn{background:#8a6d3b;}
|
||||
.diag-badge.err{background:#7f1d1d;}
|
||||
.diag-badge.fallback{background:#4f46e5;}
|
||||
.btn-compact{font-size:11px; padding:2px 6px; line-height:1.2;}
|
||||
</style>
|
||||
<div class="random-meta" style="display:flex; gap:12px; align-items:center; flex-wrap:wrap;">
|
||||
<span class="seed">Seed: <strong>{{ seed }}</strong></span>
|
||||
{% if theme %}<span class="theme">Theme: <strong>{{ theme }}</strong></span>{% endif %}
|
||||
{% if permalink %}
|
||||
<button class="btn btn-compact" type="button" aria-label="Copy permalink for this exact build" onclick="(async()=>{try{await navigator.clipboard.writeText(location.origin + '{{ permalink }}');(window.toast&&toast('Permalink copied'))||console.log('Permalink copied');}catch(e){alert('Copy failed');}})()">Copy Permalink</button>
|
||||
{% endif %}
|
||||
{% if show_diagnostics and diagnostics %}
|
||||
<span class="diag-badges" aria-label="Diagnostics" role="group">
|
||||
<span class="diag-badge" title="Attempts tried before acceptance">Att {{ diagnostics.attempts }}</span>
|
||||
<span class="diag-badge" title="Elapsed build time in milliseconds">{{ diagnostics.elapsed_ms }}ms</span>
|
||||
{% if diagnostics.timeout_hit %}<span class="diag-badge warn" title="Generation loop exceeded timeout limit before success">Timeout</span>{% endif %}
|
||||
{% if diagnostics.retries_exhausted %}<span class="diag-badge warn" title="All allotted attempts were used without an early acceptable candidate">Retries</span>{% endif %}
|
||||
{% if fallback or diagnostics.fallback %}<span class="diag-badge fallback" title="Original theme produced no candidates; Surprise mode fallback engaged">Fallback</span>{% endif %}
|
||||
</span>
|
||||
{% endif %}
|
||||
</div>
|
||||
<h3 class="commander">{{ commander }}</h3>
|
||||
<ul class="decklist">
|
||||
{% for card in decklist %}
|
||||
<li>{{ card }}</li>
|
||||
{% endfor %}
|
||||
</ul>
|
||||
<!-- Hidden current seed so HTMX reroll button can include it via hx-include -->
|
||||
<input type="hidden" id="current-seed" name="seed" value="{{ seed }}" />
|
||||
<input type="hidden" id="current-commander" name="commander" value="{{ commander }}" />
|
||||
<div class="commander-block" style="display:flex; gap:14px; align-items:flex-start; margin-top:.75rem;">
|
||||
<div class="commander-thumb" style="flex:0 0 auto;">
|
||||
<img
|
||||
src="https://api.scryfall.com/cards/named?fuzzy={{ commander|urlencode }}&format=image&version=small"
|
||||
srcset="https://api.scryfall.com/cards/named?fuzzy={{ commander|urlencode }}&format=image&version=small 160w, https://api.scryfall.com/cards/named?fuzzy={{ commander|urlencode }}&format=image&version=normal 488w"
|
||||
sizes="(max-width: 600px) 120px, 160px"
|
||||
alt="{{ commander }} image"
|
||||
width="160" height="220"
|
||||
style="width:160px; height:auto; border-radius:8px; box-shadow:0 6px 18px rgba(0,0,0,.55); border:1px solid var(--border); background:#0f1115;"
|
||||
class="commander-img"
|
||||
loading="lazy" decoding="async"
|
||||
data-card-name="{{ commander }}" />
|
||||
</div>
|
||||
<div style="flex:1 1 auto;">
|
||||
<div class="muted" style="font-size:12px; font-weight:600; letter-spacing:.5px; text-transform:uppercase;">Commander</div>
|
||||
<h3 class="commander" style="margin:.15rem 0 0 0;" data-card-name="{{ commander }}">{{ commander }}</h3>
|
||||
</div>
|
||||
</div>
|
||||
{% if summary %}
|
||||
{# Reuse the comprehensive deck summary partial #}
|
||||
{% include "partials/deck_summary.html" %}
|
||||
{% else %}
|
||||
<ul class="decklist">
|
||||
{% for card in decklist %}
|
||||
{% if card.name %}
|
||||
<li>{{ card.name }}{% if card.count %} ×{{ card.count }}{% endif %}</li>
|
||||
{% else %}
|
||||
<li>{{ card }}</li>
|
||||
{% endif %}
|
||||
{% endfor %}
|
||||
</ul>
|
||||
{% endif %}
|
||||
<script>
|
||||
// Re-run bindings after OOB swap so hover & view toggle work consistently
|
||||
(function(){
|
||||
try { if (window.bindAllCardImageRetries) window.bindAllCardImageRetries(); } catch(_) {}
|
||||
try { if (window.attachCardHover) window.attachCardHover(); } catch(_) {}
|
||||
// Deck summary initializer (idempotent) – will assign aria-selected
|
||||
try { if (window.initDeckSummaryTypeView) window.initDeckSummaryTypeView(document.getElementById('random-result')); } catch(_) {}
|
||||
})();
|
||||
</script>
|
||||
</div>
|
||||
|
|
|
|||
code/web/templates/random/index.html (new file, 274 lines)
|
|
@ -0,0 +1,274 @@
|
|||
{% extends "base.html" %}
|
||||
{% block content %}
|
||||
{% set enable_ui = random_ui %}
|
||||
<section id="random-modes" aria-labelledby="random-heading">
|
||||
<h2 id="random-heading">Random Modes</h2>
|
||||
{% if not enable_ui %}
|
||||
<div class="notice" role="status">Random UI is disabled. Set <code>RANDOM_UI=1</code> to enable.</div>
|
||||
{% else %}
|
||||
<div class="controls" role="group" aria-label="Random controls" style="display:flex; gap:8px; align-items:center; flex-wrap: wrap;">
|
||||
<label for="random-theme" class="field-label" style="margin-right:6px;">Theme</label>
|
||||
<div style="position:relative;">
|
||||
<input id="random-theme" name="theme" type="text" placeholder="optional (e.g., Tokens)" aria-label="Theme (optional)" autocomplete="off" role="combobox" aria-autocomplete="list" aria-expanded="false" aria-owns="theme-suggest-box" aria-haspopup="listbox" />
|
||||
<div id="theme-suggest-box" role="listbox" style="display:none; position:absolute; top:100%; left:0; right:0; background:var(--panel,#1e293b); border:1px solid var(--border,#334155); z-index:20; max-height:220px; overflow-y:auto; box-shadow:0 4px 10px rgba(0,0,0,.4); font-size:13px;">
|
||||
<!-- suggestions injected here -->
|
||||
</div>
|
||||
</div>
|
||||
{% if show_diagnostics %}
|
||||
<label for="rand-attempts" style="font-size:11px;">Attempts</label>
|
||||
<input id="rand-attempts" name="attempts" type="number" min="1" max="25" value="{{ random_max_attempts }}" style="width:60px; font-size:11px;" title="Override max attempts" />
|
||||
<label for="rand-timeout" style="font-size:11px;">Timeout(ms)</label>
|
||||
<input id="rand-timeout" name="timeout_ms" type="number" min="100" max="15000" step="100" value="{{ random_timeout_ms }}" style="width:80px; font-size:11px;" title="Override generation timeout in milliseconds" />
|
||||
{% endif %}
|
||||
<!-- Added hx-trigger with delay to provide debounce without custom JS recursion -->
|
||||
<button id="btn-surprise" class="btn" hx-post="/hx/random_reroll" hx-vals='{"mode":"surprise"}' hx-include="#random-theme{% if show_diagnostics %},#rand-attempts,#rand-timeout{% endif %}" hx-target="#random-result" hx-swap="outerHTML" hx-trigger="click delay:150ms" hx-disabled-elt="#btn-surprise,#btn-reroll" aria-label="Surprise me">Surprise me</button>
|
||||
<button id="btn-reroll" class="btn" hx-post="/hx/random_reroll" hx-vals='{"mode":"reroll_same_commander"}' hx-include="#current-seed,#current-commander,#random-theme{% if show_diagnostics %},#rand-attempts,#rand-timeout{% endif %}" hx-target="#random-result" hx-swap="outerHTML" hx-trigger="click delay:150ms" hx-disabled-elt="#btn-surprise,#btn-reroll" aria-label="Reroll" disabled>Reroll</button>
|
||||
<button id="btn-share" class="btn" type="button" aria-label="Copy permalink" onclick="(async ()=>{try{const r=await fetch('/build/permalink'); const j=await r.json(); const url=(j.permalink? location.origin + j.permalink : location.href); await navigator.clipboard.writeText(url); (window.toast && toast('Permalink copied')) || alert('Permalink copied');}catch(e){console.error(e); alert('Failed to copy permalink');}})()">Share</button>
|
||||
<span id="spinner" role="status" aria-live="polite" style="display:none; margin-left:8px;">Loading…</span>
|
||||
</div>
|
||||
<div id="rate-limit-banner" role="status" aria-live="polite" style="display:none; margin-top:8px; padding:6px 8px; border:1px solid #cc9900; background:#fff8e1; color:#5f4200; border-radius:4px;">
|
||||
Too many requests. Please wait…
|
||||
</div>
|
||||
<div id="random-area" style="margin-top:12px;">
|
||||
<div id="random-result" class="random-result empty" aria-live="polite">Click “Surprise me” to build a deck.</div>
|
||||
<div id="recent-seeds" style="margin-top:10px; font-size:12px; color:var(--text-muted);">
|
||||
<button id="btn-load-seeds" class="btn" type="button" style="font-size:11px; padding:2px 6px;">Show Recent Seeds</button>
|
||||
<button id="btn-metrics" class="btn" type="button" style="font-size:11px; padding:2px 6px;" title="Download NDJSON metrics" {% if not random_modes %}disabled{% endif %}>Metrics</button>
|
||||
<span id="seed-list" style="margin-left:6px;"></span>
|
||||
<div id="favorite-seeds" style="margin-top:6px;"></div>
|
||||
</div>
|
||||
</div>
|
||||
<script>
|
||||
(function(){
|
||||
// Typeahead: simple debounce + /themes/suggest
|
||||
var input = document.getElementById('random-theme');
|
||||
var listBox = document.getElementById('theme-suggest-box');
|
||||
var to = null;
|
||||
var cache = new Map(); // simple in-memory cache of q -> [names]
|
||||
var activeIndex = -1; // keyboard highlight
|
||||
function hideList(){ if(listBox){ listBox.style.display='none'; input.setAttribute('aria-expanded','false'); activeIndex=-1; } }
|
||||
function highlight(text, q){
|
||||
try{ if(!q) return text; var i=text.toLowerCase().indexOf(q.toLowerCase()); if(i===-1) return text; return text.substring(0,i)+'<mark style="background:#4f46e5; color:#fff; padding:0 2px; border-radius:2px;">'+text.substring(i,i+q.length)+'</mark>'+text.substring(i+q.length);}catch(e){return text;}}
|
||||
function renderList(items, q){
|
||||
if(!listBox) return; listBox.innerHTML=''; activeIndex=-1;
|
||||
if(!items || !items.length){ hideList(); return; }
|
||||
items.slice(0,50).forEach(function(it, idx){
|
||||
var div=document.createElement('div');
|
||||
div.setAttribute('role','option');
|
||||
div.setAttribute('data-value', it);
|
||||
div.innerHTML=highlight(it, q);
|
||||
div.style.cssText='padding:4px 8px; cursor:pointer;';
|
||||
div.addEventListener('mouseenter', function(){ setActive(idx); });
|
||||
div.addEventListener('mousedown', function(ev){ ev.preventDefault(); pick(it); });
|
||||
listBox.appendChild(div);
|
||||
});
|
||||
listBox.style.display='block';
|
||||
input.setAttribute('aria-expanded','true');
|
||||
}
|
||||
function setActive(idx){
|
||||
if(!listBox) return; var children=[...listBox.children];
|
||||
children.forEach(function(c,i){ c.style.background = (i===idx) ? 'rgba(99,102,241,0.35)' : 'transparent'; });
|
||||
activeIndex = idx;
|
||||
}
|
||||
function move(delta){
|
||||
if(!listBox || listBox.style.display==='none'){ return; }
|
||||
var children=[...listBox.children]; if(!children.length) return;
|
||||
var next = activeIndex + delta; if(next < 0) next = children.length -1; if(next >= children.length) next = 0;
|
||||
setActive(next);
|
||||
var el = children[next]; if(el && el.scrollIntoView){ el.scrollIntoView({block:'nearest'}); }
|
||||
}
|
||||
function pick(value){ input.value = value; hideList(); input.dispatchEvent(new Event('change')); }
|
||||
function updateList(items, q){ renderList(items, q); }
|
||||
function showRateLimitBanner(seconds){
|
||||
var b = document.getElementById('rate-limit-banner');
|
||||
var btn1 = document.getElementById('btn-surprise');
|
||||
var btn2 = document.getElementById('btn-reroll');
|
||||
if(!b){ return; }
|
||||
var secs = (typeof seconds === 'number' && !isNaN(seconds) && seconds > 0) ? Math.floor(seconds) : null;
|
||||
var base = 'Too many requests';
|
||||
var update = function(){
|
||||
if(secs !== null){ b.textContent = base + ' — try again in ' + secs + 's'; }
|
||||
else { b.textContent = base + ' — please try again shortly'; }
|
||||
};
|
||||
update();
|
||||
b.style.display = 'block';
|
||||
if(btn1) btn1.disabled = true; if(btn2) btn2.disabled = true;
|
||||
if(secs !== null){
|
||||
var t = setInterval(function(){
|
||||
secs -= 1; update();
|
||||
if(secs <= 0){ clearInterval(t); b.style.display = 'none'; if(btn1) btn1.disabled = false; if(btn2) btn2.disabled = false; }
|
||||
}, 1000);
|
||||
}
|
||||
}
|
||||
|
||||
function highlightMatch(item, q){
|
||||
try{
|
||||
var idx = item.toLowerCase().indexOf(q.toLowerCase());
|
||||
if(idx === -1) return item;
|
||||
return item.substring(0,idx) + '[[' + item.substring(idx, idx+q.length) + ']]' + item.substring(idx+q.length);
|
||||
}catch(e){ return item; }
|
||||
}
|
||||
async function fetchSuggest(q){
|
||||
try{
|
||||
var u = '/themes/api/suggest' + (q? ('?q=' + encodeURIComponent(q)) : '');
|
||||
if(cache.has(q)) { updateList(cache.get(q)); return; }
|
||||
var r = await fetch(u);
|
||||
if(r.status === 429){
|
||||
var ra = r.headers.get('Retry-After');
|
||||
var secs = ra ? parseInt(ra, 10) : null;
|
||||
var msg = 'You are being rate limited';
|
||||
if(secs && !isNaN(secs)) msg += ' — retry in ' + secs + 's';
|
||||
if(window.toast) { toast(msg); } else { console.warn(msg); }
|
||||
showRateLimitBanner(secs);
|
||||
return updateList([]);
|
||||
}
|
||||
if(!r.ok) return updateList([]);
|
||||
var j = await r.json();
|
||||
var items = (j && j.themes) || [];
|
||||
cache.set(q, items);
|
||||
// cap cache size to 50
|
||||
if(cache.size > 50){
|
||||
var firstKey = cache.keys().next().value; cache.delete(firstKey);
|
||||
}
|
||||
updateList(items, q);
|
||||
}catch(e){ /* no-op */ }
|
||||
}
|
||||
if(input){
|
||||
input.addEventListener('input', function(){
|
||||
var q = input.value || '';
|
||||
if(to) clearTimeout(to);
|
||||
if(!q || q.length < 2){ hideList(); return; }
|
||||
to = setTimeout(function(){ fetchSuggest(q); }, 150);
|
||||
});
|
||||
input.addEventListener('keydown', function(ev){
|
||||
if(ev.key === 'ArrowDown'){ ev.preventDefault(); move(1); }
|
||||
else if(ev.key === 'ArrowUp'){ ev.preventDefault(); move(-1); }
|
||||
else if(ev.key === 'Enter'){ if(activeIndex >=0 && listBox && listBox.children[activeIndex]){ ev.preventDefault(); pick(listBox.children[activeIndex].getAttribute('data-value')); } }
|
||||
else if(ev.key === 'Escape'){ hideList(); }
|
||||
});
|
||||
document.addEventListener('click', function(ev){ if(!listBox) return; if(ev.target === input || listBox.contains(ev.target)){ return; } hideList(); });
|
||||
}
|
||||
// Relying on hx-trigger delay (150ms) for soft debounce. Added hx-disabled-elt to avoid rapid spamming.
|
||||
document.addEventListener('htmx:afterRequest', function(){
|
||||
// Safety: ensure buttons are always re-enabled after request completes
|
||||
var b1=document.getElementById('btn-surprise'); var b2=document.getElementById('btn-reroll');
|
||||
if(b1) b1.disabled=false; if(b2 && document.getElementById('current-seed')) b2.disabled=false;
|
||||
});
|
||||
// (No configRequest hook needed; using hx-vals + hx-include for simple form-style submission.)
|
||||
// Enable reroll once a result exists
|
||||
document.addEventListener('htmx:afterSwap', function(ev){
|
||||
if (ev && ev.detail && ev.detail.target && ev.detail.target.id === 'random-result'){
|
||||
var rr = document.getElementById('btn-reroll'); if (rr) rr.disabled = false;
|
||||
// Refresh recent seeds asynchronously
|
||||
fetch('/api/random/seeds').then(r=>r.json()).then(function(j){
|
||||
try{
|
||||
if(!j || !j.seeds) return; var span=document.getElementById('seed-list'); if(!span) return;
|
||||
span.textContent = j.seeds.join(', ');
|
||||
}catch(e){}
|
||||
}).catch(function(){});
|
||||
}
|
||||
});
|
||||
// Simple spinner hooks
|
||||
document.addEventListener('htmx:beforeRequest', function(){ var s=document.getElementById('spinner'); if(s) s.style.display='inline-block'; });
|
||||
document.addEventListener('htmx:afterRequest', function(){ var s=document.getElementById('spinner'); if(s) s.style.display='none'; });
|
||||
// HTMX-friendly rate limit message on 429 + countdown banner
|
||||
document.addEventListener('htmx:afterOnLoad', function(ev){
|
||||
try{
|
||||
var xhr = ev && ev.detail && ev.detail.xhr; if(!xhr) return;
|
||||
if(xhr.status === 429){
|
||||
var ra = xhr.getResponseHeader('Retry-After');
|
||||
var secs = ra ? parseInt(ra, 10) : null;
|
||||
var msg = 'Too many requests';
|
||||
if(secs && !isNaN(secs)) msg += ' — try again in ' + secs + 's';
|
||||
if(window.toast) { toast(msg); } else { alert(msg); }
|
||||
showRateLimitBanner(secs);
|
||||
}
|
||||
}catch(e){/* no-op */}
|
||||
});
|
||||
|
||||
function favoriteButton(seed, favorites){
|
||||
var isFav = favorites.includes(seed);
|
||||
var b=document.createElement('button');
|
||||
b.type='button';
|
||||
b.textContent = isFav ? '★' : '☆';
|
||||
b.title = isFav ? 'Remove from favorites' : 'Add to favorites';
|
||||
b.style.cssText='font-size:12px; margin-left:2px; padding:0 4px; line-height:1;';
|
||||
b.addEventListener('click', function(ev){
|
||||
ev.stopPropagation();
|
||||
fetch('/api/random/seed_favorite', {method:'POST', headers:{'Content-Type':'application/json'}, body: JSON.stringify({seed: seed})})
|
||||
.then(r=>r.json()).then(function(){
|
||||
// refresh seeds display
|
||||
loadSeeds(true);
|
||||
}).catch(()=>{});
|
||||
});
|
||||
return b;
|
||||
}
|
||||
function renderFavorites(favorites){
|
||||
var container=document.getElementById('favorite-seeds'); if(!container) return;
|
||||
if(!favorites || !favorites.length){ container.textContent=''; return; }
|
||||
container.innerHTML='<span style="margin-right:4px;">Favorites:</span>';
|
||||
favorites.forEach(function(s){
|
||||
var btn=document.createElement('button'); btn.type='button'; btn.className='btn'; btn.textContent=s; btn.style.cssText='font-size:10px; margin-right:4px; padding:2px 5px;';
|
||||
btn.addEventListener('click', function(){
|
||||
fetch('/hx/random_reroll', {method:'POST', headers:{'Content-Type':'application/json'}, body: JSON.stringify({ seed: s-1, theme: document.getElementById('random-theme').value || null }) })
|
||||
.then(r=>r.text()).then(html=>{ var target=document.getElementById('random-result'); if(target){ target.outerHTML=html; } });
|
||||
});
|
||||
container.appendChild(btn);
|
||||
});
|
||||
}
|
||||
function renderSeedList(seeds, favorites){
|
||||
var span=document.getElementById('seed-list'); if(!span) return;
|
||||
if(!seeds || !seeds.length){ span.textContent='(none yet)'; return; }
|
||||
span.innerHTML='';
|
||||
seeds.slice().forEach(function(s){
|
||||
var b=document.createElement('button');
|
||||
b.type='button';
|
||||
b.textContent=s;
|
||||
b.className='btn seed-btn';
|
||||
b.style.cssText='font-size:10px; margin-right:4px; padding:2px 5px;';
|
||||
b.setAttribute('aria-label','Rebuild using seed '+s);
|
||||
b.addEventListener('click', function(){
|
||||
// Post to reroll endpoint but treat as explicit seed build
|
||||
fetch('/hx/random_reroll', {method:'POST', headers:{'Content-Type':'application/json'}, body: JSON.stringify({ seed: s-1, theme: document.getElementById('random-theme').value || null }) })
|
||||
.then(r=> r.text())
|
||||
.then(html=>{ var target=document.getElementById('random-result'); if(target){ target.outerHTML=html; } })
|
||||
.catch(()=>{});
|
||||
});
|
||||
span.appendChild(b);
|
||||
span.appendChild(favoriteButton(s, favorites || []));
|
||||
});
|
||||
}
|
||||
function loadSeeds(refreshFavs){
|
||||
fetch('/api/random/seeds').then(r=>r.json()).then(function(j){
|
||||
if(!j){ renderSeedList([]); return; }
|
||||
renderSeedList(j.seeds || [], j.favorites || []);
|
||||
if(refreshFavs) renderFavorites(j.favorites || []);
|
||||
}).catch(function(){ var span=document.getElementById('seed-list'); if(span) span.textContent='(error)'; });
|
||||
}
|
||||
|
||||
// Manual load seeds button
|
||||
var btnSeeds = document.getElementById('btn-load-seeds');
|
||||
if(btnSeeds){ btnSeeds.addEventListener('click', function(){ loadSeeds(true); }); }
|
||||
var btnMetrics = document.getElementById('btn-metrics');
|
||||
if(btnMetrics){
|
||||
btnMetrics.addEventListener('click', function(){
|
||||
fetch('/status/random_metrics_ndjson').then(r=>r.text()).then(function(t){
|
||||
try{ var blob=new Blob([t], {type:'application/x-ndjson'}); var url=URL.createObjectURL(blob); var a=document.createElement('a'); a.href=url; a.download='random_metrics.ndjson'; document.body.appendChild(a); a.click(); setTimeout(function(){ URL.revokeObjectURL(url); a.remove(); }, 1000);}catch(e){ console.error(e); }
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
// Persist last used theme in localStorage
|
||||
try {
|
||||
var THEME_KEY='random_last_theme';
|
||||
if(input){
|
||||
var prev = localStorage.getItem(THEME_KEY);
|
||||
if(prev && !input.value){ input.value = prev; }
|
||||
input.addEventListener('change', function(){ localStorage.setItem(THEME_KEY, input.value || ''); });
|
||||
}
|
||||
} catch(e) { /* ignore */ }
|
||||
})();
|
||||
</script>
|
||||
{% endif %}
|
||||
</section>
|
||||
{% endblock %}
|
||||
|
|
@ -25,9 +25,18 @@ services:
|
|||
ALLOW_MUST_HAVES: "1" # 1=enable must-include/must-exclude cards feature; 0=disable
|
||||
SHOW_MISC_POOL: "0"
|
||||
WEB_THEME_PICKER_DIAGNOSTICS: "1" # 1=enable extra theme catalog diagnostics fields, uncapped view & /themes/metrics
|
||||
# Sampling experiments
|
||||
# SPLASH_ADAPTIVE: "0" # 1=enable adaptive splash penalty scaling by commander color count
|
||||
# SPLASH_ADAPTIVE_SCALE: "1:1.0,2:1.0,3:1.0,4:0.6,5:0.35" # override default scaling
|
||||
# Sampling experiments
|
||||
# SPLASH_ADAPTIVE: "0" # 1=enable adaptive splash penalty scaling by commander color count
|
||||
# SPLASH_ADAPTIVE_SCALE: "1:1.0,2:1.0,3:1.0,4:0.6,5:0.35" # override default scaling
|
||||
# Rarity weighting (advanced; default weights tuned for variety)
|
||||
# RARITY_W_MYTHIC: "1.2"
|
||||
# RARITY_W_RARE: "0.9"
|
||||
# RARITY_W_UNCOMMON: "0.65"
|
||||
# RARITY_W_COMMON: "0.4"
|
||||
# Diversity targets (optional): e.g., "mythic:0-1,rare:0-2,uncommon:0-4,common:0-6"
|
||||
# RARITY_DIVERSITY_TARGETS: ""
|
||||
# Penalty if exceeding diversity targets (negative lowers score)
|
||||
# RARITY_DIVERSITY_OVER_PENALTY: "-0.5"
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Random Build (Alpha) Feature Flags
|
||||
|
|
@ -38,10 +47,11 @@ services:
|
|||
# ------------------------------------------------------------------
|
||||
|
||||
# Random Modes (feature flags)
|
||||
RANDOM_MODES: "0" # 1=enable random build endpoints and backend features
|
||||
RANDOM_UI: "0" # 1=show Surprise/Theme/Reroll/Share controls in UI
|
||||
RANDOM_MODES: "1" # 1=enable random build endpoints and backend features
|
||||
RANDOM_UI: "1" # 1=show Surprise/Theme/Reroll/Share controls in UI
|
||||
RANDOM_MAX_ATTEMPTS: "5" # cap retry attempts
|
||||
RANDOM_TIMEOUT_MS: "5000" # per-build timeout in ms
|
||||
# RANDOM_BUILD_SUPPRESS_INITIAL_EXPORT: "1" # (now defaults to 1 automatically for random builds; set to 0 to force legacy double-export behavior)
|
||||
|
||||
# Theming
|
||||
THEME: "dark" # system|light|dark
|
||||
|
|
@ -64,6 +74,10 @@ services:
|
|||
WEB_TAG_WORKERS: "4" # Worker count when parallel tagging
|
||||
THEME_CATALOG_MODE: "merge" # Use merged Phase B catalog builder (with YAML export)
|
||||
THEME_YAML_FAST_SKIP: "0" # 1=allow skipping per-theme YAML on fast path (rare; default always export)
|
||||
# Live YAML scan interval in seconds for change detection (dev convenience)
|
||||
# THEME_CATALOG_YAML_SCAN_INTERVAL_SEC: "2.0"
|
||||
# Prewarm common theme filters at startup (speeds first interactions)
|
||||
# WEB_THEME_FILTER_PREWARM: "0"
|
||||
WEB_AUTO_ENFORCE: "0" # 1=auto-run compliance export after builds
|
||||
WEB_CUSTOM_EXPORT_BASE: "" # Optional: custom base dir for deck export artifacts
|
||||
APP_VERSION: "dev" # Displayed version label (set per release/tag)
|
||||
|
|
@ -95,6 +109,10 @@ services:
|
|||
# DECK_CONFIG: "/app/config" # Where the config browser looks for *.json
|
||||
# OWNED_CARDS_DIR: "/app/owned_cards" # Preferred path for owned inventory uploads
|
||||
# CARD_LIBRARY_DIR: "/app/owned_cards" # Back-compat alias for OWNED_CARDS_DIR
|
||||
# CSV base directory override (useful for testing with frozen snapshots)
|
||||
# CSV_FILES_DIR: "/app/csv_files"
|
||||
# Inject a one-off synthetic CSV for index testing without altering shards
|
||||
# CARD_INDEX_EXTRA_CSV: ""
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Headless / Non-interactive Build Configuration
|
||||
|
|
@ -139,22 +157,45 @@ services:
|
|||
# SHOW_MISC_POOL: "1" # (already above) expose misc pool debug UI if implemented
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Editorial / Theme Catalog (Phase D) Controls
|
||||
# These drive automated description generation, popularity bucketing,
|
||||
# YAML backfilling, and regression / metrics exports. Normally only
|
||||
# used during catalog curation or CI.
|
||||
# ------------------------------------------------------------------
|
||||
# EDITORIAL_SEED: "1234" # Deterministic seed for description & inference ordering.
|
||||
# EDITORIAL_AGGRESSIVE_FILL: "0" # 1=borrow extra synergies for sparse themes (<2 curated/enforced).
|
||||
# EDITORIAL_POP_BOUNDARIES: "50,120,250,600" # Override popularity bucket boundaries (4 comma ints).
|
||||
# EDITORIAL_POP_EXPORT: "0" # 1=emit theme_popularity_metrics.json alongside theme_list.json.
|
||||
# EDITORIAL_BACKFILL_YAML: "0" # 1=enable YAML metadata backfill (description/popularity) on build.
|
||||
# EDITORIAL_INCLUDE_FALLBACK_SUMMARY: "0" # 1=include description_fallback_summary block in JSON output.
|
||||
# EDITORIAL_REQUIRE_DESCRIPTION: "0" # (lint script) 1=fail if a theme lacks description.
|
||||
# EDITORIAL_REQUIRE_POPULARITY: "0" # (lint script) 1=fail if a theme lacks popularity bucket.
|
||||
# EDITORIAL_MIN_EXAMPLES: "0" # (future) minimum curated example commanders/cards (guard rails).
|
||||
# EDITORIAL_MIN_EXAMPLES_ENFORCE: "0" # (future) 1=enforce above threshold; else warn only.
|
||||
# ------------------------------------------------------------------
|
||||
# Editorial / Theme Catalog Controls
|
||||
# These drive automated description generation, popularity bucketing,
|
||||
# YAML backfilling, and regression / metrics exports. Normally only
|
||||
# used during catalog curation or CI.
|
||||
# ------------------------------------------------------------------
|
||||
# EDITORIAL_SEED: "1234" # Deterministic seed for description & inference ordering.
|
||||
# EDITORIAL_AGGRESSIVE_FILL: "0" # 1=borrow extra synergies for sparse themes (<2 curated/enforced).
|
||||
# EDITORIAL_POP_BOUNDARIES: "50,120,250,600" # Override popularity bucket boundaries (4 comma ints).
|
||||
# EDITORIAL_POP_EXPORT: "0" # 1=emit theme_popularity_metrics.json alongside theme_list.json.
|
||||
# EDITORIAL_BACKFILL_YAML: "0" # 1=enable YAML metadata backfill (description/popularity) on build.
|
||||
# EDITORIAL_INCLUDE_FALLBACK_SUMMARY: "0" # 1=include description_fallback_summary block in JSON output.
|
||||
# EDITORIAL_REQUIRE_DESCRIPTION: "0" # (lint script) 1=fail if a theme lacks description.
|
||||
# EDITORIAL_REQUIRE_POPULARITY: "0" # (lint script) 1=fail if a theme lacks popularity bucket.
|
||||
# EDITORIAL_MIN_EXAMPLES: "0" # (future) minimum curated example commanders/cards (guard rails).
|
||||
# EDITORIAL_MIN_EXAMPLES_ENFORCE: "0" # (future) 1=enforce above threshold; else warn only.
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Theme Preview Cache & Redis (optional)
|
||||
# Controls for the theme preview caching layer; defaults are sane for most users.
|
||||
# Uncomment to tune or enable Redis read-through/write-through caching.
|
||||
# ------------------------------------------------------------------
|
||||
# In-memory cache sizing and logging
|
||||
# THEME_PREVIEW_CACHE_MAX: "400" # Max previews cached in memory
|
||||
# WEB_THEME_PREVIEW_LOG: "0" # 1=verbose preview cache logs
|
||||
# Adaptive eviction/background refresh
|
||||
# THEME_PREVIEW_ADAPTIVE: "0" # 1=enable adaptive cache policy
|
||||
# THEME_PREVIEW_EVICT_COST_THRESHOLDS: "5,15,40" # cost thresholds for eviction tiers
|
||||
# THEME_PREVIEW_BG_REFRESH: "0" # 1=background refresh worker
|
||||
# THEME_PREVIEW_BG_REFRESH_INTERVAL: "120" # seconds between background refresh sweeps
|
||||
# TTL policy (advanced)
|
||||
# THEME_PREVIEW_TTL_BASE: "300" # base seconds
|
||||
# THEME_PREVIEW_TTL_MIN: "60"
|
||||
# THEME_PREVIEW_TTL_MAX: "900"
|
||||
# THEME_PREVIEW_TTL_BANDS: "0.2,0.5,0.8" # low_critical, low_moderate, high_grow (fractions)
|
||||
# THEME_PREVIEW_TTL_STEPS: "2,4,2,3,1" # step counts for band progression
|
||||
# Redis backend (optional)
|
||||
# THEME_PREVIEW_REDIS_URL: "redis://redis:6379/0"
|
||||
# THEME_PREVIEW_REDIS_DISABLE: "0" # 1=force disable redis even if URL is set
|
||||
volumes:
|
||||
- ${PWD}/deck_files:/app/deck_files
|
||||
- ${PWD}/logs:/app/logs
|
||||
|
|
|
|||
|
|
@ -22,9 +22,18 @@ services:
|
|||
WEB_VIRTUALIZE: "1" # 1=enable list virtualization in Step 5
|
||||
ALLOW_MUST_HAVES: "1" # Include/Exclude feature enable
|
||||
WEB_THEME_PICKER_DIAGNOSTICS: "0" # 1=enable extra theme catalog diagnostics fields, uncapped synergies & /themes/metrics
|
||||
# Sampling experiments (optional)
|
||||
# SPLASH_ADAPTIVE: "0" # 1=enable adaptive splash penalty scaling by commander color count
|
||||
# SPLASH_ADAPTIVE_SCALE: "1:1.0,2:1.0,3:1.0,4:0.6,5:0.35" # override default scaling
|
||||
# Sampling experiments (optional)
|
||||
# SPLASH_ADAPTIVE: "0" # 1=enable adaptive splash penalty scaling by commander color count
|
||||
# SPLASH_ADAPTIVE_SCALE: "1:1.0,2:1.0,3:1.0,4:0.6,5:0.35" # override default scaling
|
||||
# Rarity weighting (advanced; default weights tuned for variety)
|
||||
# RARITY_W_MYTHIC: "1.2"
|
||||
# RARITY_W_RARE: "0.9"
|
||||
# RARITY_W_UNCOMMON: "0.65"
|
||||
# RARITY_W_COMMON: "0.4"
|
||||
# Diversity targets (optional): e.g., "mythic:0-1,rare:0-2,uncommon:0-4,common:0-6"
|
||||
# RARITY_DIVERSITY_TARGETS: ""
|
||||
# Penalty if exceeding diversity targets (negative lowers score)
|
||||
# RARITY_DIVERSITY_OVER_PENALTY: "-0.5"
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Random Build (Alpha) Feature Flags
|
||||
|
|
@ -46,6 +55,10 @@ services:
|
|||
WEB_TAG_WORKERS: "4" # Worker count (CPU bound; tune as needed)
|
||||
THEME_CATALOG_MODE: "merge" # Phase B merged theme builder
|
||||
THEME_YAML_FAST_SKIP: "0" # 1=allow skipping YAML export on fast path (default 0 = always export)
|
||||
# Live YAML scan interval in seconds for change detection (dev convenience)
|
||||
# THEME_CATALOG_YAML_SCAN_INTERVAL_SEC: "2.0"
|
||||
# Prewarm common theme filters at startup (speeds first interactions)
|
||||
# WEB_THEME_FILTER_PREWARM: "0"
|
||||
WEB_AUTO_ENFORCE: "0" # 1=auto compliance JSON export after builds
|
||||
WEB_CUSTOM_EXPORT_BASE: "" # Optional export base override
|
||||
APP_VERSION: "v2.2.10" # Displayed in footer/health
|
||||
|
|
@ -68,6 +81,10 @@ services:
|
|||
# DECK_CONFIG: "/app/config"
|
||||
# OWNED_CARDS_DIR: "/app/owned_cards"
|
||||
# CARD_LIBRARY_DIR: "/app/owned_cards" # legacy alias
|
||||
# CSV base directory override (useful for testing with frozen snapshots)
|
||||
# CSV_FILES_DIR: "/app/csv_files"
|
||||
# Inject a one-off synthetic CSV for index testing without altering shards
|
||||
# CARD_INDEX_EXTRA_CSV: ""
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Headless / CLI Mode (optional automation)
|
||||
|
|
@ -104,21 +121,44 @@ services:
|
|||
# PORT: "8080" # Uvicorn port
|
||||
# WORKERS: "1" # Uvicorn workers
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Editorial / Theme Catalog (Phase D) Controls (advanced / optional)
|
||||
# These are primarily for maintainers refining automated theme
|
||||
# descriptions & popularity analytics. Leave commented for normal use.
|
||||
# ------------------------------------------------------------------
|
||||
# EDITORIAL_SEED: "1234" # Deterministic seed for reproducible ordering.
|
||||
# EDITORIAL_AGGRESSIVE_FILL: "0" # 1=borrow extra synergies for sparse themes.
|
||||
# EDITORIAL_POP_BOUNDARIES: "50,120,250,600" # Override popularity bucket thresholds (4 ints).
|
||||
# EDITORIAL_POP_EXPORT: "0" # 1=emit theme_popularity_metrics.json.
|
||||
# EDITORIAL_BACKFILL_YAML: "0" # 1=write description/popularity back to YAML (missing only).
|
||||
# EDITORIAL_INCLUDE_FALLBACK_SUMMARY: "0" # 1=include fallback description usage summary in JSON.
|
||||
# EDITORIAL_REQUIRE_DESCRIPTION: "0" # (lint) 1=fail if any theme lacks description.
|
||||
# EDITORIAL_REQUIRE_POPULARITY: "0" # (lint) 1=fail if any theme lacks popularity bucket.
|
||||
# EDITORIAL_MIN_EXAMPLES: "0" # (future) minimum curated examples target.
|
||||
# EDITORIAL_MIN_EXAMPLES_ENFORCE: "0" # (future) enforce above threshold vs warn.
|
||||
# ------------------------------------------------------------------
|
||||
# Editorial / Theme Catalog Controls (advanced / optional)
|
||||
# These are primarily for maintainers refining automated theme
|
||||
# descriptions & popularity analytics. Leave commented for normal use.
|
||||
# ------------------------------------------------------------------
|
||||
# EDITORIAL_SEED: "1234" # Deterministic seed for reproducible ordering.
|
||||
# EDITORIAL_AGGRESSIVE_FILL: "0" # 1=borrow extra synergies for sparse themes.
|
||||
# EDITORIAL_POP_BOUNDARIES: "50,120,250,600" # Override popularity bucket thresholds (4 ints).
|
||||
# EDITORIAL_POP_EXPORT: "0" # 1=emit theme_popularity_metrics.json.
|
||||
# EDITORIAL_BACKFILL_YAML: "0" # 1=write description/popularity back to YAML (missing only).
|
||||
# EDITORIAL_INCLUDE_FALLBACK_SUMMARY: "0" # 1=include fallback description usage summary in JSON.
|
||||
# EDITORIAL_REQUIRE_DESCRIPTION: "0" # (lint) 1=fail if any theme lacks description.
|
||||
# EDITORIAL_REQUIRE_POPULARITY: "0" # (lint) 1=fail if any theme lacks popularity bucket.
|
||||
# EDITORIAL_MIN_EXAMPLES: "0" # (future) minimum curated examples target.
|
||||
# EDITORIAL_MIN_EXAMPLES_ENFORCE: "0" # (future) enforce above threshold vs warn.
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Theme Preview Cache & Redis (optional)
|
||||
# Controls for the theme preview caching layer; defaults are sane for most users.
|
||||
# Uncomment to tune or enable Redis read-through/write-through caching.
|
||||
# ------------------------------------------------------------------
|
||||
# In-memory cache sizing and logging
|
||||
# THEME_PREVIEW_CACHE_MAX: "400" # Max previews cached in memory
|
||||
# WEB_THEME_PREVIEW_LOG: "0" # 1=verbose preview cache logs
|
||||
# Adaptive eviction/background refresh
|
||||
# THEME_PREVIEW_ADAPTIVE: "0" # 1=enable adaptive cache policy
|
||||
# THEME_PREVIEW_EVICT_COST_THRESHOLDS: "5,15,40" # cost thresholds for eviction tiers
|
||||
# THEME_PREVIEW_BG_REFRESH: "0" # 1=background refresh worker
|
||||
# THEME_PREVIEW_BG_REFRESH_INTERVAL: "120" # seconds between background refresh sweeps
|
||||
# TTL policy (advanced)
|
||||
# THEME_PREVIEW_TTL_BASE: "300" # base seconds
|
||||
# THEME_PREVIEW_TTL_MIN: "60"
|
||||
# THEME_PREVIEW_TTL_MAX: "900"
|
||||
# THEME_PREVIEW_TTL_BANDS: "0.2,0.5,0.8" # low_critical, low_moderate, high_grow (fractions)
|
||||
# THEME_PREVIEW_TTL_STEPS: "2,4,2,3,1" # step counts for band progression
|
||||
# Redis backend (optional)
|
||||
# THEME_PREVIEW_REDIS_URL: "redis://redis:6379/0"
|
||||
# THEME_PREVIEW_REDIS_DISABLE: "0" # 1=force disable redis even if URL is set
|
||||
volumes:
|
||||
- ${PWD}/deck_files:/app/deck_files
|
||||
- ${PWD}/logs:/app/logs
|
||||
|
|
|
|||