mirror of https://github.com/mwisnowski/mtg_python_deckbuilder.git
synced 2025-12-16 15:40:12 +01:00

Merge pull request #50 from mwisnowski/maintenance/web-unification

Web UI Architecture Improvements: Modern Stack & Quality Enhancements

commit c5774a04f1
183 changed files with 19742 additions and 4714 deletions
@@ -106,6 +106,9 @@ WEB_TAG_PARALLEL=1 # dockerhub: WEB_TAG_PARALLEL="1"
WEB_TAG_WORKERS=2 # dockerhub: WEB_TAG_WORKERS="4"
WEB_AUTO_ENFORCE=0 # dockerhub: WEB_AUTO_ENFORCE="0"

# Card Image Caching (optional, uses Scryfall bulk data API)
CACHE_CARD_IMAGES=1 # dockerhub: CACHE_CARD_IMAGES="1" (1=download images to card_files/images/, 0=fetch from Scryfall API on demand)

# Build Stage Ordering
WEB_STAGE_ORDER=new # new|legacy. 'new' (default): creatures → spells → lands → fill. 'legacy': lands → creatures → spells → fill
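A minimal sketch (not part of this commit) of how an app might read these knobs; the helper name and defaults here are assumptions for illustration:

```python
import os

def _env_flag(name: str, default: str = "0") -> bool:
    # Treat "1"/"true"/"yes" (any case) as enabled
    return os.getenv(name, default).strip().lower() in {"1", "true", "yes"}

CACHE_CARD_IMAGES = _env_flag("CACHE_CARD_IMAGES", "1")
WEB_STAGE_ORDER = os.getenv("WEB_STAGE_ORDER", "new")
if WEB_STAGE_ORDER not in {"new", "legacy"}:
    WEB_STAGE_ORDER = "new"  # fall back to the documented default
```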
.gitignore (vendored, 11 changes)
@@ -9,6 +9,7 @@

RELEASE_NOTES.md
test.py
test_*.py
!test_exclude_cards.txt
!test_include_exclude_config.json

@@ -41,3 +42,13 @@ logs/*
!logs/perf/
logs/perf/*
!logs/perf/theme_preview_warm_baseline.json

# Node.js and build artifacts
node_modules/
code/web/static/js/
code/web/static/styles.css
*.js.map

# Keep TypeScript sources and Tailwind CSS input
!code/web/static/ts/
!code/web/static/tailwind.css
CHANGELOG.md (96 changes)
@@ -9,14 +9,56 @@ This format follows Keep a Changelog principles and aims for Semantic Versioning

## [Unreleased]
### Added
- **Build X and Compare** feature: Build multiple decks with same configuration and compare results side-by-side
  - Build 1-10 decks in parallel to see variance from card selection randomness
  - Real-time progress tracking with dynamic time estimates based on color count
  - Comparison view with card overlap statistics and individual build summaries
  - Smart filtering excludes guaranteed cards (basics, staples) from "Most Common Cards"
  - Card hover support throughout comparison interface
  - Rebuild button to rerun same configuration
  - Export all decks as ZIP archive
- **Template Validation Tests**: Comprehensive test suite for HTML/Jinja2 templates
  - Validates Jinja2 syntax across all templates
  - Checks HTML structure (balanced tags, unique IDs, proper attributes)
  - Basic accessibility validation (alt text, form labels, button types)
  - Regression prevention thresholds to maintain code quality
- **Code Quality Tools**: Enhanced development tooling for maintainability
  - Automated utilities for code cleanup
  - Improved type checking configuration
- **Card Image Caching**: Optional local image cache for faster card display
  - Downloads card images from Scryfall bulk data (respects API guidelines)
  - Graceful fallback to Scryfall API for uncached images
  - Enabled via `CACHE_CARD_IMAGES=1` environment variable
  - Integrated with setup/tagging process
  - Statistics endpoint with intelligent caching (weekly refresh, matching card data staleness)
- **Component Library**: Living documentation of reusable UI components at `/docs/components`
  - Interactive examples of all buttons, modals, forms, cards, and panels
  - Jinja2 macros for consistent component usage
  - Component partial templates for reuse across pages
- **TypeScript Migration**: Migrated JavaScript codebase to TypeScript for better type safety
  - Converted `components.js` (376 lines) and `app.js` (1390 lines) to TypeScript
  - Created shared type definitions for state management, telemetry, HTMX, and UI components
  - Integrated TypeScript compilation into build process (`npm run build:ts`)
  - Compiled JavaScript output in `code/web/static/js/` directory
  - Docker build automatically compiles TypeScript during image creation

### Changed
- **Inline JavaScript Cleanup**: Removed legacy card hover system (~230 lines of unused code)
- **JavaScript Consolidation**: Extracted inline scripts to TypeScript modules
  - Created `cardHover.ts` for unified hover panel functionality
  - Created `cardImages.ts` for card image loading with automatic retry fallbacks
  - Reduced inline script size in base template for better maintainability
- **Migrated CSS to Tailwind**: Consolidated and unified CSS architecture
  - Tailwind CSS v3 with custom MTG color palette
  - PostCSS build pipeline with autoprefixer
  - Reduced inline styles in templates (moved to shared CSS classes)
  - Organized CSS into functional sections with clear documentation
- **Theme Visual Improvements**: Enhanced readability and consistency across all theme modes
  - Light mode: Darker text for improved readability, warm earth tone color palette
  - Dark mode: Refined contrast for better visual hierarchy
  - High-contrast mode: Optimized for maximum accessibility
  - Consistent hover states across all interactive elements
  - Improved visibility of form inputs and controls
- **JavaScript Modernization**: Updated to modern JavaScript patterns
  - Converted `var` declarations to `const`/`let`
  - Added TypeScript type annotations for better IDE support and error catching
  - Consolidated event handlers and utility functions
- **Docker Build Optimization**: Improved developer experience
  - Hot reload enabled for templates and static files
  - Volume mounts for rapid iteration without rebuilds
- **Template Modernization**: Migrated templates to use component system
- **Intelligent Synergy Builder**: Analyze multiple builds and create optimized "best-of" deck
  - Scores cards by frequency (50%), EDHREC rank (25%), and theme tags (25%)
  - 10% bonus for cards appearing in 80%+ of builds
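A sketch of the scoring rule just described (weights taken from the changelog; the function and parameter names are assumptions, not the project's actual API):

```python
def synergy_score(appearances: int, total_builds: int,
                  edhrec_norm: float, theme_norm: float) -> float:
    """Score a card across repeated builds.

    edhrec_norm and theme_norm are assumed pre-normalized to 0..1,
    with higher meaning better EDHREC rank / stronger theme overlap.
    """
    frequency = appearances / total_builds
    score = 0.50 * frequency + 0.25 * edhrec_norm + 0.25 * theme_norm
    if frequency >= 0.80:  # 10% bonus for near-ubiquitous cards
        score *= 1.10
    return score
```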
@@ -27,18 +69,46 @@ This format follows Keep a Changelog principles and aims for Semantic Versioning
  - `ENABLE_BATCH_BUILD` environment variable to toggle feature (default: enabled)
  - Detailed progress logging for multi-build orchestration
  - User guide: `docs/user_guides/batch_build_compare.md`
- **Web UI Component Library**: Standardized UI components for consistent design across all pages
  - 5 component partial template files (buttons, modals, forms, cards, panels)
  - ~900 lines of component CSS styles
  - Interactive JavaScript utilities (components.js)
  - Living component library page at `/docs/components`
  - 1600+ lines developer documentation (component_catalog.md)
- **Custom UI Enhancements**:
  - Darker gray styling for home page buttons
  - Visual highlighting for selected theme chips in deck builder

### Changed
_None_
- Migrated 5 templates to new component system (home, 404, 500, setup, commanders)
- **Type Checking Configuration**: Improved Python code quality tooling
  - Configured type checker for better error detection
  - Optimized linting rules for development workflow

### Fixed
- **Template Quality**: Resolved HTML structure issues found by validation tests
  - Fixed duplicate ID attributes in build wizard and theme picker templates
  - Removed erroneous block tags from component documentation
  - Corrected template structure for HTMX fragments
- **Code Quality**: Resolved type checking warnings and improved code maintainability
  - Fixed type annotation inconsistencies
  - Cleaned up redundant code quality suppressions
  - Corrected configuration conflicts

### Removed
_None_

### Fixed
_None_

### Performance
_None_
- Hot reload for CSS/template changes (no Docker rebuild needed)
- Optional image caching reduces Scryfall API calls
- Faster page loads with optimized CSS
- TypeScript compilation produces optimized JavaScript

### For Users
- Faster card image loading with optional caching
- Cleaner, more consistent web UI design
- Improved page load performance
- More reliable JavaScript behavior

### Deprecated
_None_
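A sketch of the optional image-cache fallback noted above under Performance (the local directory comes from the docs; the Scryfall URL pattern and helper name are illustrative assumptions):

```python
from pathlib import Path
from urllib.parse import quote

IMAGE_DIR = Path("card_files/images")

def image_source(card_name: str) -> str:
    """Return a local cached image path if present, else a Scryfall API URL."""
    cached = IMAGE_DIR / f"{card_name}.jpg"
    if cached.exists():
        return str(cached)
    # Graceful fallback: fetch on demand from Scryfall's named-card endpoint
    return f"https://api.scryfall.com/cards/named?exact={quote(card_name)}"
```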
@@ -283,6 +283,7 @@ See `.env.example` for the full catalog. Common knobs:
| `WEB_AUTO_REFRESH_DAYS` | `7` | Refresh `cards.csv` if older than N days. |
| `WEB_TAG_PARALLEL` | `1` | Use parallel workers during tagging. |
| `WEB_TAG_WORKERS` | `4` | Worker count for parallel tagging. |
| `CACHE_CARD_IMAGES` | `0` | Download card images to `card_files/images/` (1=enable, 0=fetch from API on demand). See [Image Caching](docs/IMAGE_CACHING.md). |
| `WEB_AUTO_ENFORCE` | `0` | Re-export decks after auto-applying compliance fixes. |
| `WEB_THEME_PICKER_DIAGNOSTICS` | `1` | Enable theme diagnostics endpoints. |
Dockerfile (31 changes)
@@ -10,21 +10,42 @@ ENV PYTHONUNBUFFERED=1
ARG APP_VERSION=dev
ENV APP_VERSION=${APP_VERSION}

# Install system dependencies if needed
# Install system dependencies including Node.js
RUN apt-get update && apt-get install -y \
    gcc \
    curl \
    && curl -fsSL https://deb.nodesource.com/setup_lts.x | bash - \
    && apt-get install -y nodejs \
    && rm -rf /var/lib/apt/lists/*

# Copy requirements first for better caching
# Copy package files for Node.js dependencies
COPY package.json package-lock.json* ./

# Install Node.js dependencies
RUN npm install

# Copy Tailwind/TypeScript config files
COPY tailwind.config.js postcss.config.js tsconfig.json ./

# Copy requirements for Python dependencies (for better caching)
COPY requirements.txt .

# Install Python dependencies
RUN pip install --no-cache-dir -r requirements.txt

# Copy application code
# Copy Python application code (includes templates needed for Tailwind)
COPY code/ ./code/
COPY mypy.ini .

# Tailwind source is already in code/web/static/tailwind.css from COPY code/
# TypeScript sources are in code/web/static/ts/ from COPY code/

# Force fresh CSS build by removing any copied styles.css
RUN rm -f ./code/web/static/styles.css

# Build CSS and TypeScript
RUN npm run build

# Copy default configs in two locations:
# 1) /app/config is the live path (may be overlaid by a volume)
# 2) /app/.defaults/config is preserved in the image for first-run seeding when a volume is mounted
@@ -36,7 +57,9 @@ RUN mkdir -p owned_cards
# Store in /.defaults/card_files so it persists after volume mount
RUN mkdir -p /.defaults/card_files
# Copy entire card_files directory (will include cache if present, empty if not)
COPY card_files/ /.defaults/card_files/
# COMMENTED OUT FOR LOCAL DEV: card_files is mounted as volume anyway
# Uncomment for production builds or CI/CD
# COPY card_files/ /.defaults/card_files/

# Create necessary directories as mount points
RUN mkdir -p deck_files logs csv_files card_files config /.defaults
@@ -309,6 +309,7 @@ Most defaults are defined in `docker-compose.yml` and documented in `.env.example`
| `WEB_AUTO_REFRESH_DAYS` | `7` | Refresh `cards.csv` if older than N days. |
| `WEB_TAG_PARALLEL` | `1` | Enable parallel tagging workers. |
| `WEB_TAG_WORKERS` | `4` | Worker count for tagging (compose default). |
| `CACHE_CARD_IMAGES` | `0` | Download card images to `card_files/images/` (1=enable, 0=fetch from API on demand). Requires ~3-6 GB. See [Image Caching](docs/IMAGE_CACHING.md). |
| `WEB_AUTO_ENFORCE` | `0` | Auto-apply bracket enforcement after builds. |
| `WEB_THEME_PICKER_DIAGNOSTICS` | `1` | Enable theme diagnostics endpoints. |
@@ -3,36 +3,106 @@

## [Unreleased]

### Summary
Major new feature: Build X and Compare with Intelligent Synergy Builder. Run the same deck configuration multiple times to see variance, compare results side-by-side, and create optimized "best-of" decks.
Web UI improvements with Tailwind CSS migration, TypeScript conversion, component library, template validation tests, enhanced code quality tools, and optional card image caching for faster performance and better maintainability.

### Added
- **Build X and Compare**: Build 1-10 decks in parallel with same configuration
  - Side-by-side comparison with card overlap statistics
  - Smart filtering of guaranteed cards
  - Rebuild button for quick iterations
  - ZIP export of all builds
- **Synergy Builder**: Create optimized deck from multiple builds
  - Intelligent scoring (frequency + EDHREC + themes)
  - Color-coded synergy preview
  - Full metadata export (CSV/TXT/JSON)
  - Partner commander support
  - Feature flag: `ENABLE_BATCH_BUILD` (default: on)
  - User guide: `docs/user_guides/batch_build_compare.md`
- **Template Validation Tests**: Comprehensive test suite ensuring HTML/template quality
  - Validates Jinja2 syntax and structure
  - Checks for common HTML issues (duplicate IDs, balanced tags)
  - Basic accessibility validation
  - Prevents regression in template quality
- **Code Quality Tools**: Enhanced development tooling for maintainability
  - Automated utilities for code cleanup
  - Improved type checking configuration
- **Card Image Caching**: Optional local image cache for faster card display
  - Downloads card images from Scryfall bulk data (respects API guidelines)
  - Graceful fallback to Scryfall API for uncached images
  - Enabled via `CACHE_CARD_IMAGES=1` environment variable
  - Integrated with setup/tagging process
  - Statistics endpoint with intelligent caching (weekly refresh, matching card data staleness)
- **Component Library**: Living documentation of reusable UI components at `/docs/components`
  - Interactive examples of all buttons, modals, forms, cards, and panels
  - Jinja2 macros for consistent component usage
  - Component partial templates for reuse across pages
- **TypeScript Migration**: Migrated JavaScript codebase to TypeScript for better type safety
  - Converted `components.js` (376 lines) and `app.js` (1390 lines) to TypeScript
  - Created shared type definitions for state management, telemetry, HTMX, and UI components
  - Integrated TypeScript compilation into build process (`npm run build:ts`)
  - Compiled JavaScript output in `code/web/static/js/` directory
  - Docker build automatically compiles TypeScript during image creation

### Changed
_None_
- **Inline JavaScript Cleanup**: Removed legacy card hover system (~230 lines of unused code)
- **JavaScript Consolidation**: Extracted inline scripts to TypeScript modules
  - Created `cardHover.ts` for unified hover panel functionality
  - Created `cardImages.ts` for card image loading with automatic retry fallbacks
  - Reduced inline script size in base template for better maintainability
- **Migrated CSS to Tailwind**: Consolidated and unified CSS architecture
  - Tailwind CSS v3 with custom MTG color palette
  - PostCSS build pipeline with autoprefixer
  - Reduced inline styles in templates (moved to shared CSS classes)
  - Organized CSS into functional sections with clear documentation
- **Theme Visual Improvements**: Enhanced readability and consistency across all theme modes
  - Light mode: Darker text for improved readability, warm earth tone color palette
  - Dark mode: Refined contrast for better visual hierarchy
  - High-contrast mode: Optimized for maximum accessibility
  - Consistent hover states across all interactive elements
  - Improved visibility of form inputs and controls
- **JavaScript Modernization**: Updated to modern JavaScript patterns
  - Converted `var` declarations to `const`/`let`
  - Added TypeScript type annotations for better IDE support and error catching
  - Consolidated event handlers and utility functions
- **Docker Build Optimization**: Improved developer experience
  - Hot reload enabled for templates and static files
  - Volume mounts for rapid iteration without rebuilds
- **Template Modernization**: Migrated templates to use component system
- **Type Checking Configuration**: Improved Python code quality tooling
  - Configured type checker for better error detection
  - Optimized linting rules for development workflow
- **Intelligent Synergy Builder**: Analyze multiple builds and create optimized "best-of" deck
  - Scores cards by frequency (50%), EDHREC rank (25%), and theme tags (25%)
  - 10% bonus for cards appearing in 80%+ of builds
  - Color-coded synergy scores in preview (green=high, red=low)
  - Partner commander support with combined color identity
  - Multi-copy card tracking (e.g., 8 Mountains, 7 Islands)
  - Export synergy deck with full metadata (CSV, TXT, JSON files)
  - `ENABLE_BATCH_BUILD` environment variable to toggle feature (default: enabled)
  - Detailed progress logging for multi-build orchestration
  - User guide: `docs/user_guides/batch_build_compare.md`
- **Web UI Component Library**: Standardized UI components for consistent design across all pages
  - 5 component partial template files (buttons, modals, forms, cards, panels)
  - ~900 lines of component CSS styles
  - Interactive JavaScript utilities (components.js)
  - Living component library page at `/docs/components`
  - 1600+ lines developer documentation (component_catalog.md)
- **Custom UI Enhancements**:
  - Darker gray styling for home page buttons
  - Visual highlighting for selected theme chips in deck builder

### Removed
_None_

### Fixed
_None_
- **Template Quality**: Resolved HTML structure issues
  - Fixed duplicate ID attributes in templates
  - Removed erroneous template block tags
  - Corrected structure for HTMX fragments
- **Code Quality**: Resolved type checking warnings and improved code maintainability
  - Fixed type annotation inconsistencies
  - Cleaned up redundant code quality suppressions
  - Corrected configuration conflicts

### Performance
_None_
- Hot reload for CSS/template changes (no Docker rebuild needed)
- Optional image caching reduces Scryfall API calls
- Faster page loads with optimized CSS
- TypeScript compilation produces optimized JavaScript

### For Users
_No changes yet_
- Faster card image loading with optional caching
- Cleaner, more consistent web UI design
- Improved page load performance
- More reliable JavaScript behavior

### Deprecated
_None_
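A sketch of the "weekly refresh, matching card data staleness" idea mentioned under Card Image Caching above (the 7-day window mirrors `WEB_AUTO_REFRESH_DAYS`; the helper itself is an illustrative assumption):

```python
import time
from pathlib import Path

def is_stale(path: Path, max_age_days: int = 7) -> bool:
    """True if the cached artifact is missing or older than max_age_days."""
    if not path.exists():
        return True
    age_seconds = time.time() - path.stat().st_mtime
    return age_seconds > max_age_days * 86400
```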
@@ -4,6 +4,6 @@ __all__ = ['DeckBuilder']
def __getattr__(name):
    # Lazy-load DeckBuilder to avoid side effects during import of submodules
    if name == 'DeckBuilder':
        from .builder import DeckBuilder  # type: ignore
        from .builder import DeckBuilder
        return DeckBuilder
    raise AttributeError(name)
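For context, this module-level `__getattr__` (PEP 562) defers the heavy import until the attribute is first touched; a hedged usage sketch:

```python
# Importing the package stays cheap; DeckBuilder is resolved lazily.
from deck_builder import DeckBuilder  # triggers __getattr__('DeckBuilder')

builder = DeckBuilder()  # assumes a no-arg constructor for illustration
```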
@@ -1,22 +1,18 @@
"""Loader for background cards derived from `background_cards.csv`."""
"""Loader for background cards derived from all_cards.parquet."""
from __future__ import annotations

import ast
import csv
import re
from dataclasses import dataclass
from functools import lru_cache
from pathlib import Path
import re
from typing import Mapping, Tuple
from typing import Any, Mapping, Tuple

from logging_util import get_logger
from deck_builder.partner_background_utils import analyze_partner_background
from path_util import csv_dir

LOGGER = get_logger(__name__)

BACKGROUND_FILENAME = "background_cards.csv"


@dataclass(frozen=True, slots=True)
class BackgroundCard:
@@ -57,7 +53,7 @@ class BackgroundCatalog:
def load_background_cards(
    source_path: str | Path | None = None,
) -> BackgroundCatalog:
    """Load and cache background card data."""
    """Load and cache background card data from all_cards.parquet."""

    resolved = _resolve_background_path(source_path)
    try:

@@ -65,7 +61,7 @@ def load_background_cards(
        mtime_ns = getattr(stat, "st_mtime_ns", int(stat.st_mtime * 1_000_000_000))
        size = stat.st_size
    except FileNotFoundError:
        raise FileNotFoundError(f"Background CSV not found at {resolved}") from None
        raise FileNotFoundError(f"Background data not found at {resolved}") from None

    entries, version = _load_background_cards_cached(str(resolved), mtime_ns)
    etag = f"{size}-{mtime_ns}-{len(entries)}"
@@ -88,46 +84,49 @@ def _load_background_cards_cached(path_str: str, mtime_ns: int) -> Tuple[Tuple[BackgroundCard, ...], str]:
    if not path.exists():
        return tuple(), "unknown"

    with path.open("r", encoding="utf-8", newline="") as handle:
        first_line = handle.readline()
        version = "unknown"
        if first_line.startswith("#"):
            version = _parse_version(first_line)
        else:
            handle.seek(0)
        reader = csv.DictReader(handle)
        if reader.fieldnames is None:
            return tuple(), version
        entries = _rows_to_cards(reader)
    try:
        import pandas as pd
        df = pd.read_parquet(path, engine="pyarrow")

        # Filter for background cards
        if 'isBackground' not in df.columns:
            LOGGER.warning("isBackground column not found in %s", path)
            return tuple(), "unknown"

        df_backgrounds = df[df['isBackground']].copy()

        if len(df_backgrounds) == 0:
            LOGGER.warning("No background cards found in %s", path)
            return tuple(), "unknown"

        entries = _rows_to_cards(df_backgrounds)
        version = "parquet"

    except Exception as e:
        LOGGER.error("Failed to load backgrounds from %s: %s", path, e)
        return tuple(), "unknown"

    frozen = tuple(entries)
    return frozen, version


def _resolve_background_path(override: str | Path | None) -> Path:
    """Resolve path to all_cards.parquet."""
    if override:
        return Path(override).resolve()
    return (Path(csv_dir()) / BACKGROUND_FILENAME).resolve()
    # Use card_files/processed/all_cards.parquet
    return Path("card_files/processed/all_cards.parquet").resolve()


def _parse_version(line: str) -> str:
    tokens = line.lstrip("# ").strip().split()
    for token in tokens:
        if "=" not in token:
            continue
        key, value = token.split("=", 1)
        if key == "version":
            return value
    return "unknown"


def _rows_to_cards(reader: csv.DictReader) -> list[BackgroundCard]:
def _rows_to_cards(df) -> list[BackgroundCard]:
    """Convert DataFrame rows to BackgroundCard objects."""
    entries: list[BackgroundCard] = []
    seen: set[str] = set()
    for raw in reader:
        if not raw:

    for _, row in df.iterrows():
        if row.empty:
            continue
        card = _row_to_card(raw)
        card = _row_to_card(row)
        if card is None:
            continue
        key = card.display_name.lower()
@@ -135,20 +134,35 @@ def _rows_to_cards(reader: csv.DictReader) -> list[BackgroundCard]:
            continue
        seen.add(key)
        entries.append(card)

    entries.sort(key=lambda card: card.display_name)
    return entries


def _row_to_card(row: Mapping[str, str]) -> BackgroundCard | None:
    name = _clean_str(row.get("name"))
    face_name = _clean_str(row.get("faceName")) or None
def _row_to_card(row) -> BackgroundCard | None:
    """Convert a DataFrame row to a BackgroundCard."""
    # Helper to safely get values from DataFrame row
    def get_val(key: str):
        try:
            if hasattr(row, key):
                val = getattr(row, key)
                # Handle pandas NA/None
                if val is None or (hasattr(val, '__class__') and 'NA' in val.__class__.__name__):
                    return None
                return val
            return None
        except Exception:
            return None

    name = _clean_str(get_val("name"))
    face_name = _clean_str(get_val("faceName")) or None
    display = face_name or name
    if not display:
        return None

    type_line = _clean_str(row.get("type"))
    oracle_text = _clean_multiline(row.get("text"))
    raw_theme_tags = tuple(_parse_literal_list(row.get("themeTags")))
    type_line = _clean_str(get_val("type"))
    oracle_text = _clean_multiline(get_val("text"))
    raw_theme_tags = tuple(_parse_literal_list(get_val("themeTags")))
    detection = analyze_partner_background(type_line, oracle_text, raw_theme_tags)
    if not detection.is_background:
        return None
@@ -158,18 +172,18 @@ def _row_to_card(row: Mapping[str, str]) -> BackgroundCard | None:
        face_name=face_name,
        display_name=display,
        slug=_slugify(display),
        color_identity=_parse_color_list(row.get("colorIdentity")),
        colors=_parse_color_list(row.get("colors")),
        mana_cost=_clean_str(row.get("manaCost")),
        mana_value=_parse_float(row.get("manaValue")),
        color_identity=_parse_color_list(get_val("colorIdentity")),
        colors=_parse_color_list(get_val("colors")),
        mana_cost=_clean_str(get_val("manaCost")),
        mana_value=_parse_float(get_val("manaValue")),
        type_line=type_line,
        oracle_text=oracle_text,
        keywords=tuple(_split_list(row.get("keywords"))),
        keywords=tuple(_split_list(get_val("keywords"))),
        theme_tags=tuple(tag for tag in raw_theme_tags if tag),
        raw_theme_tags=raw_theme_tags,
        edhrec_rank=_parse_int(row.get("edhrecRank")),
        layout=_clean_str(row.get("layout")) or "normal",
        side=_clean_str(row.get("side")) or None,
        edhrec_rank=_parse_int(get_val("edhrecRank")),
        layout=_clean_str(get_val("layout")) or "normal",
        side=_clean_str(get_val("side")) or None,
    )
@@ -189,8 +203,19 @@ def _clean_multiline(value: object) -> str:
def _parse_literal_list(value: object) -> list[str]:
    if value is None:
        return []
    if isinstance(value, (list, tuple, set)):

    # Check if it's a numpy array (from Parquet/pandas)
    is_numpy = False
    try:
        import numpy as np
        is_numpy = isinstance(value, np.ndarray)
    except ImportError:
        pass

    # Handle lists, tuples, sets, and numpy arrays
    if isinstance(value, (list, tuple, set)) or is_numpy:
        return [str(item).strip() for item in value if str(item).strip()]

    text = str(value).strip()
    if not text:
        return []
@@ -205,6 +230,17 @@ def _parse_literal_list(value: object) -> list[str]:


def _split_list(value: object) -> list[str]:
    # Check if it's a numpy array (from Parquet/pandas)
    is_numpy = False
    try:
        import numpy as np
        is_numpy = isinstance(value, np.ndarray)
    except ImportError:
        pass

    if isinstance(value, (list, tuple, set)) or is_numpy:
        return [str(item).strip() for item in value if str(item).strip()]

    text = _clean_str(value)
    if not text:
        return []
@@ -213,6 +249,18 @@ def _split_list(value: object) -> list[str]:


def _parse_color_list(value: object) -> Tuple[str, ...]:
    # Check if it's a numpy array (from Parquet/pandas)
    is_numpy = False
    try:
        import numpy as np
        is_numpy = isinstance(value, np.ndarray)
    except ImportError:
        pass

    if isinstance(value, (list, tuple, set)) or is_numpy:
        parts = [str(item).strip().upper() for item in value if str(item).strip()]
        return tuple(parts)

    text = _clean_str(value)
    if not text:
        return tuple()
@@ -95,7 +95,7 @@ class DeckBuilder(
        # If a seed was assigned pre-init, use it
        if self.seed is not None:
            # Import here to avoid any heavy import cycles at module import time
            from random_util import set_seed as _set_seed  # type: ignore
            from random_util import set_seed as _set_seed
            self._rng = _set_seed(int(self.seed))
        else:
            self._rng = random.Random()

@@ -107,7 +107,7 @@ class DeckBuilder(
    def set_seed(self, seed: int | str) -> None:
        """Set deterministic seed for this builder and reset its RNG instance."""
        try:
            from random_util import derive_seed_from_string as _derive, set_seed as _set_seed  # type: ignore
            from random_util import derive_seed_from_string as _derive, set_seed as _set_seed
            s = _derive(seed)
            self.seed = int(s)
            self._rng = _set_seed(s)

@@ -215,7 +215,7 @@ class DeckBuilder(
        try:
            # Compute a quick compliance snapshot here to hint at upcoming enforcement
            if hasattr(self, 'compute_and_print_compliance') and not getattr(self, 'headless', False):
                from deck_builder.brackets_compliance import evaluate_deck as _eval  # type: ignore
                from deck_builder.brackets_compliance import evaluate_deck as _eval
                bracket_key = str(getattr(self, 'bracket_name', '') or getattr(self, 'bracket_level', 'core')).lower()
                commander = getattr(self, 'commander_name', None)
                snap = _eval(self.card_library, commander_name=commander, bracket=bracket_key)

@@ -240,15 +240,15 @@ class DeckBuilder(
            csv_path = self.export_decklist_csv()
            # Persist CSV path immediately (before any later potential exceptions)
            try:
                self.last_csv_path = csv_path  # type: ignore[attr-defined]
                self.last_csv_path = csv_path
            except Exception:
                pass
            try:
                import os as _os
                base, _ext = _os.path.splitext(_os.path.basename(csv_path))
                txt_path = self.export_decklist_text(filename=base + '.txt')  # type: ignore[attr-defined]
                txt_path = self.export_decklist_text(filename=base + '.txt')
                try:
                    self.last_txt_path = txt_path  # type: ignore[attr-defined]
                    self.last_txt_path = txt_path
                except Exception:
                    pass
                # Display the text file contents for easy copy/paste to online deck builders

@@ -256,18 +256,18 @@ class DeckBuilder(
            # Compute bracket compliance and save a JSON report alongside exports
            try:
                if hasattr(self, 'compute_and_print_compliance'):
                    report0 = self.compute_and_print_compliance(base_stem=base)  # type: ignore[attr-defined]
                    report0 = self.compute_and_print_compliance(base_stem=base)
                    # If non-compliant and interactive, offer enforcement now
                    try:
                        if isinstance(report0, dict) and report0.get('overall') == 'FAIL' and not getattr(self, 'headless', False):
                            from deck_builder.phases.phase6_reporting import ReportingMixin as _RM  # type: ignore
                            from deck_builder.phases.phase6_reporting import ReportingMixin as _RM
                            if isinstance(self, _RM) and hasattr(self, 'enforce_and_reexport'):
                                self.output_func("One or more bracket limits exceeded. Enter to auto-resolve, or Ctrl+C to skip.")
                                try:
                                    _ = self.input_func("")
                                except Exception:
                                    pass
                                self.enforce_and_reexport(base_stem=base, mode='prompt')  # type: ignore[attr-defined]
                                self.enforce_and_reexport(base_stem=base, mode='prompt')
                    except Exception:
                        pass
            except Exception:

@@ -295,12 +295,12 @@ class DeckBuilder(
                    cfg_dir = 'config'
                    if cfg_dir:
                        _os.makedirs(cfg_dir, exist_ok=True)
                        self.export_run_config_json(directory=cfg_dir, filename=base + '.json')  # type: ignore[attr-defined]
                        self.export_run_config_json(directory=cfg_dir, filename=base + '.json')
                    if cfg_path_env:
                        cfg_dir2 = _os.path.dirname(cfg_path_env) or '.'
                        cfg_name2 = _os.path.basename(cfg_path_env)
                        _os.makedirs(cfg_dir2, exist_ok=True)
                        self.export_run_config_json(directory=cfg_dir2, filename=cfg_name2)  # type: ignore[attr-defined]
                        self.export_run_config_json(directory=cfg_dir2, filename=cfg_name2)
                except Exception:
                    pass
        except Exception:

@@ -308,8 +308,8 @@ class DeckBuilder(
        else:
            # Mark suppression so random flow knows nothing was exported yet
            try:
                self.last_csv_path = None  # type: ignore[attr-defined]
                self.last_txt_path = None  # type: ignore[attr-defined]
                self.last_csv_path = None
                self.last_txt_path = None
            except Exception:
                pass
            # If owned-only and deck not complete, print a note

@@ -624,8 +624,8 @@ class DeckBuilder(
        try:
            rec.card_library = rec_subset
            # Export CSV and TXT with suffix
            rec.export_decklist_csv(directory='deck_files', filename=base_stem + '_recommendations.csv', suppress_output=True)  # type: ignore[attr-defined]
            rec.export_decklist_text(directory='deck_files', filename=base_stem + '_recommendations.txt', suppress_output=True)  # type: ignore[attr-defined]
            rec.export_decklist_csv(directory='deck_files', filename=base_stem + '_recommendations.csv', suppress_output=True)
            rec.export_decklist_text(directory='deck_files', filename=base_stem + '_recommendations.txt', suppress_output=True)
        finally:
            rec.card_library = original_lib
        # Notify user succinctly

@@ -1843,7 +1843,7 @@ class DeckBuilder(
            from deck_builder import builder_constants as bc
            from settings import MULTIPLE_COPY_CARDS
        except Exception:
            MULTIPLE_COPY_CARDS = []  # type: ignore
            MULTIPLE_COPY_CARDS = []
        is_land = 'land' in str(card_type or entry.get('Card Type','')).lower()
        is_basic = False
        try:

@@ -2353,7 +2353,7 @@ class DeckBuilder(
        rng = getattr(self, 'rng', None)
        try:
            if rng:
                rng.shuffle(bucket_keys)  # type: ignore
                rng.shuffle(bucket_keys)
            else:
                random.shuffle(bucket_keys)
        except Exception:
@@ -1,4 +1,4 @@
from typing import Dict, List, Final, Tuple, Union, Callable, Any as _Any
from typing import Dict, List, Final, Tuple, Union, Callable, Any
from settings import CARD_DATA_COLUMNS as CSV_REQUIRED_COLUMNS  # unified
from path_util import csv_dir
import pandas as pd

@@ -21,7 +21,7 @@ DUPLICATE_CARD_FORMAT: Final[str] = '{card_name} x {count}'
COMMANDER_CSV_PATH: Final[str] = f"{csv_dir()}/commander_cards.csv"
DECK_DIRECTORY = '../deck_files'
# M4: Deprecated - Parquet handles types natively (no converters needed)
COMMANDER_CONVERTERS: Final[Dict[str, str]] = {
COMMANDER_CONVERTERS: Final[Dict[str, Any]] = {
    'themeTags': ast.literal_eval,
    'creatureTypes': ast.literal_eval,
    'roleTags': ast.literal_eval,

@@ -140,18 +140,18 @@ OTHER_COLOR_MAP: Final[Dict[str, Tuple[str, List[str], List[str]]]] = {
}

# Card category validation rules
CREATURE_VALIDATION_RULES: Final[Dict[str, Dict[str, Union[str, int, float, bool]]]] = {
CREATURE_VALIDATION_RULES: Final[Dict[str, Dict[str, Any]]] = {
    'power': {'type': ('str', 'int', 'float'), 'required': True},
    'toughness': {'type': ('str', 'int', 'float'), 'required': True},
    'creatureTypes': {'type': 'list', 'required': True}
}

SPELL_VALIDATION_RULES: Final[Dict[str, Dict[str, Union[str, int, float, bool]]]] = {
SPELL_VALIDATION_RULES: Final[Dict[str, Dict[str, Any]]] = {
    'manaCost': {'type': 'str', 'required': True},
    'text': {'type': 'str', 'required': True}
}

LAND_VALIDATION_RULES: Final[Dict[str, Dict[str, Union[str, int, float, bool]]]] = {
LAND_VALIDATION_RULES: Final[Dict[str, Dict[str, Any]]] = {
    'type': {'type': ('str', 'object'), 'required': True},
    'text': {'type': ('str', 'object'), 'required': False}
}

@@ -526,7 +526,7 @@ CSV_READ_TIMEOUT: Final[int] = 30  # Timeout in seconds for CSV read operations
CSV_PROCESSING_BATCH_SIZE: Final[int] = 1000  # Number of rows to process in each batch

# CSV validation configuration
CSV_VALIDATION_RULES: Final[Dict[str, Dict[str, Union[str, int, float]]]] = {
CSV_VALIDATION_RULES: Final[Dict[str, Dict[str, Any]]] = {
    'name': {'type': ('str', 'object'), 'required': True, 'unique': True},
    'edhrecRank': {'type': ('str', 'int', 'float', 'object'), 'min': 0, 'max': 100000},
    'manaValue': {'type': ('str', 'int', 'float', 'object'), 'min': 0, 'max': 20},

@@ -602,12 +602,12 @@ GAME_CHANGERS: Final[List[str]] = [
# - color_identity: list[str] of required color letters (subset must be in commander CI)
# - printed_cap: int | None (None means no printed cap)
# - exclusive_group: str | None (at most one from the same group)
# - triggers: { tags_any: list[str], tags_all: list[str] }
# - triggers: { tagsAny: list[str], tags_all: list[str] }
# - default_count: int (default 25)
# - rec_window: tuple[int,int] (recommendation window)
# - thrumming_stone_synergy: bool
# - type_hint: 'creature' | 'noncreature'
MULTI_COPY_ARCHETYPES: Final[dict[str, dict[str, _Any]]] = {
MULTI_COPY_ARCHETYPES: Final[dict[str, dict[str, Any]]] = {
    'cid_timeless_artificer': {
        'id': 'cid_timeless_artificer',
        'name': 'Cid, Timeless Artificer',

@@ -615,7 +615,7 @@ MULTI_COPY_ARCHETYPES: Final[dict[str, dict[str, _Any]]] = {
        'printed_cap': None,
        'exclusive_group': None,
        'triggers': {
            'tags_any': ['artificer kindred', 'hero kindred', 'artifacts matter'],
            'tagsAny': ['artificer kindred', 'hero kindred', 'artifacts matter'],
            'tags_all': []
        },
        'default_count': 25,

@@ -630,7 +630,7 @@ MULTI_COPY_ARCHETYPES: Final[dict[str, dict[str, _Any]]] = {
        'printed_cap': None,
        'exclusive_group': None,
        'triggers': {
            'tags_any': ['burn','spellslinger','prowess','storm','copy','cascade','impulse draw','treasure','ramp','graveyard','mill','discard','recursion'],
            'tagsAny': ['burn','spellslinger','prowess','storm','copy','cascade','impulse draw','treasure','ramp','graveyard','mill','discard','recursion'],
            'tags_all': []
        },
        'default_count': 25,

@@ -645,7 +645,7 @@ MULTI_COPY_ARCHETYPES: Final[dict[str, dict[str, _Any]]] = {
        'printed_cap': None,
        'exclusive_group': None,
        'triggers': {
            'tags_any': ['rabbit kindred','tokens matter','aggro'],
            'tagsAny': ['rabbit kindred','tokens matter','aggro'],
            'tags_all': []
        },
        'default_count': 25,

@@ -660,7 +660,7 @@ MULTI_COPY_ARCHETYPES: Final[dict[str, dict[str, _Any]]] = {
        'printed_cap': None,
        'exclusive_group': None,
        'triggers': {
            'tags_any': ['tokens','tokens matter','go-wide','exile matters','ooze kindred','spells matter','spellslinger','graveyard','mill','discard','recursion','domain','self-mill','delirium','descend'],
            'tagsAny': ['tokens','tokens matter','go-wide','exile matters','ooze kindred','spells matter','spellslinger','graveyard','mill','discard','recursion','domain','self-mill','delirium','descend'],
            'tags_all': []
        },
        'default_count': 25,

@@ -675,7 +675,7 @@ MULTI_COPY_ARCHETYPES: Final[dict[str, dict[str, _Any]]] = {
        'printed_cap': None,
        'exclusive_group': 'rats',
        'triggers': {
            'tags_any': ['rats','swarm','aristocrats','sacrifice','devotion-b','lifedrain','graveyard','recursion'],
            'tagsAny': ['rats','swarm','aristocrats','sacrifice','devotion-b','lifedrain','graveyard','recursion'],
            'tags_all': []
        },
        'default_count': 25,

@@ -690,7 +690,7 @@ MULTI_COPY_ARCHETYPES: Final[dict[str, dict[str, _Any]]] = {
        'printed_cap': None,
        'exclusive_group': 'rats',
        'triggers': {
            'tags_any': ['rats','swarm','aristocrats','sacrifice','devotion-b','lifedrain','graveyard','recursion'],
            'tagsAny': ['rats','swarm','aristocrats','sacrifice','devotion-b','lifedrain','graveyard','recursion'],
            'tags_all': []
        },
        'default_count': 25,

@@ -705,7 +705,7 @@ MULTI_COPY_ARCHETYPES: Final[dict[str, dict[str, _Any]]] = {
        'printed_cap': 7,
        'exclusive_group': None,
        'triggers': {
            'tags_any': ['dwarf kindred','treasure','equipment','tokens','go-wide','tribal'],
            'tagsAny': ['dwarf kindred','treasure','equipment','tokens','go-wide','tribal'],
            'tags_all': []
        },
        'default_count': 7,

@@ -720,7 +720,7 @@ MULTI_COPY_ARCHETYPES: Final[dict[str, dict[str, _Any]]] = {
        'printed_cap': None,
        'exclusive_group': None,
        'triggers': {
            'tags_any': ['mill','advisor kindred','control','defenders','walls','draw-go'],
            'tagsAny': ['mill','advisor kindred','control','defenders','walls','draw-go'],
            'tags_all': []
        },
        'default_count': 25,

@@ -735,7 +735,7 @@ MULTI_COPY_ARCHETYPES: Final[dict[str, dict[str, _Any]]] = {
        'printed_cap': None,
        'exclusive_group': None,
        'triggers': {
            'tags_any': ['demon kindred','aristocrats','sacrifice','recursion','lifedrain'],
            'tagsAny': ['demon kindred','aristocrats','sacrifice','recursion','lifedrain'],
            'tags_all': []
        },
        'default_count': 25,

@@ -750,7 +750,7 @@ MULTI_COPY_ARCHETYPES: Final[dict[str, dict[str, _Any]]] = {
        'printed_cap': 9,
        'exclusive_group': None,
        'triggers': {
            'tags_any': ['wraith kindred','ring','amass','orc','menace','aristocrats','sacrifice','devotion-b'],
            'tagsAny': ['wraith kindred','ring','amass','orc','menace','aristocrats','sacrifice','devotion-b'],
            'tags_all': []
        },
        'default_count': 9,

@@ -765,7 +765,7 @@ MULTI_COPY_ARCHETYPES: Final[dict[str, dict[str, _Any]]] = {
        'printed_cap': None,
        'exclusive_group': None,
        'triggers': {
            'tags_any': ['bird kindred','aggro'],
            'tagsAny': ['bird kindred','aggro'],
            'tags_all': []
        },
        'default_count': 25,

@@ -780,7 +780,7 @@ MULTI_COPY_ARCHETYPES: Final[dict[str, dict[str, _Any]]] = {
        'printed_cap': None,
        'exclusive_group': None,
        'triggers': {
            'tags_any': ['aggro','human kindred','knight kindred','historic matters','artifacts matter'],
            'tagsAny': ['aggro','human kindred','knight kindred','historic matters','artifacts matter'],
            'tags_all': []
        },
        'default_count': 25,

@@ -956,3 +956,4 @@ def get_backgrounds(df: pd.DataFrame) -> pd.DataFrame:
    if 'isBackground' not in df.columns:
        return pd.DataFrame()
    return df[df['isBackground'] == True].copy()  # noqa: E712
@@ -62,6 +62,32 @@ def _detect_produces_mana(text: str) -> bool:
    return False


def _extract_colors_from_land_type(type_line: str) -> List[str]:
    """Extract mana colors from basic land types in a type line.

    Args:
        type_line: Card type line (e.g., "Land — Mountain", "Land — Forest Plains")

    Returns:
        List of color letters (e.g., ['R'], ['W', 'G'])
    """
    if not isinstance(type_line, str):
        return []
    type_lower = type_line.lower()
    colors = []
    basic_land_colors = {
        'plains': 'W',
        'island': 'U',
        'swamp': 'B',
        'mountain': 'R',
        'forest': 'G',
    }
    for land_type, color in basic_land_colors.items():
        if land_type in type_lower:
            colors.append(color)
    return colors
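# Illustrative check of the helper above (a hypothetical REPL session, not part
# of the diff): matches follow the dictionary's insertion order, so a dual-typed
# line yields 'W' before 'G'.
# >>> _extract_colors_from_land_type("Land — Mountain")
# ['R']
# >>> _extract_colors_from_land_type("Land — Forest Plains")
# ['W', 'G']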

def _resolved_csv_dir(base_dir: str | None = None) -> str:
    try:
        if base_dir:
@@ -144,7 +170,9 @@ def _load_multi_face_land_map(base_dir: str) -> Dict[str, Dict[str, Any]]:
        return {}

    # Select only needed columns
    usecols = ['name', 'layout', 'side', 'type', 'text', 'manaCost', 'manaValue', 'faceName']
    # M9: Added backType to detect MDFC lands where land is on back face
    # M9: Added colorIdentity to extract mana colors for MDFC lands
    usecols = ['name', 'layout', 'side', 'type', 'text', 'manaCost', 'manaValue', 'faceName', 'backType', 'colorIdentity']
    available_cols = [col for col in usecols if col in df.columns]
    if not available_cols:
        return {}

@@ -160,7 +188,16 @@ def _load_multi_face_land_map(base_dir: str) -> Dict[str, Dict[str, Any]]:
    multi_df['type'] = multi_df['type'].fillna('').astype(str)
    multi_df['side'] = multi_df['side'].fillna('').astype(str)
    multi_df['text'] = multi_df['text'].fillna('').astype(str)
    land_rows = multi_df[multi_df['type'].str.contains('land', case=False, na=False)]
    # M9: Check both type and backType for land faces
    if 'backType' in multi_df.columns:
        multi_df['backType'] = multi_df['backType'].fillna('').astype(str)
        land_mask = (
            multi_df['type'].str.contains('land', case=False, na=False) |
            multi_df['backType'].str.contains('land', case=False, na=False)
        )
        land_rows = multi_df[land_mask]
    else:
        land_rows = multi_df[multi_df['type'].str.contains('land', case=False, na=False)]
    if land_rows.empty:
        return {}
    mapping: Dict[str, Dict[str, Any]] = {}

@@ -169,6 +206,78 @@ def _load_multi_face_land_map(base_dir: str) -> Dict[str, Dict[str, Any]]:
        seen: set[tuple[str, str, str]] = set()
        front_is_land = False
        layout_val = ''

        # M9: Handle merged rows with backType
        if len(group) == 1 and 'backType' in group.columns:
            row = group.iloc[0]
            back_type_val = str(row.get('backType', '') or '')
            if back_type_val and 'land' in back_type_val.lower():
                # Construct synthetic faces from merged row
                front_type = str(row.get('type', '') or '')
                front_text = str(row.get('text', '') or '')
                mana_cost_val = str(row.get('manaCost', '') or '')
                mana_value_raw = row.get('manaValue', '')
                mana_value_val = None
                try:
                    if mana_value_raw not in (None, ''):
                        mana_value_val = float(mana_value_raw)
                        if math.isnan(mana_value_val):
                            mana_value_val = None
                except Exception:
                    mana_value_val = None

                # Front face
                faces.append({
                    'face': str(row.get('faceName', '') or name),
                    'side': 'a',
                    'type': front_type,
                    'text': front_text,
                    'mana_cost': mana_cost_val,
                    'mana_value': mana_value_val,
                    'produces_mana': _detect_produces_mana(front_text),
                    'is_land': 'land' in front_type.lower(),
                    'layout': str(row.get('layout', '') or ''),
                })

                # Back face (synthesized)
                # M9: Use colorIdentity column for MDFC land colors (more reliable than parsing type line)
                color_identity_raw = row.get('colorIdentity', [])
                if isinstance(color_identity_raw, str):
                    # Handle string format like "['G']" or "G"
                    try:
                        import ast
                        color_identity_raw = ast.literal_eval(color_identity_raw)
                    except Exception:
                        color_identity_raw = [c.strip() for c in color_identity_raw.split(',') if c.strip()]
                back_face_colors = list(color_identity_raw) if color_identity_raw else []
                # Fallback to parsing land type if colorIdentity not available
                if not back_face_colors:
                    back_face_colors = _extract_colors_from_land_type(back_type_val)

                faces.append({
                    'face': name.split(' // ')[1] if ' // ' in name else 'Back',
                    'side': 'b',
                    'type': back_type_val,
                    'text': '',  # Not available in merged row
                    'mana_cost': '',
                    'mana_value': None,
                    'produces_mana': True,  # Assume land produces mana
                    'is_land': True,
                    'layout': str(row.get('layout', '') or ''),
                    'colors': back_face_colors,  # M9: Color information for mana sources
                })

                front_is_land = 'land' in front_type.lower()
                layout_val = str(row.get('layout', '') or '')
                mapping[name] = {
                    'faces': faces,
                    'front_is_land': front_is_land,
                    'layout': layout_val,
                    'colors': back_face_colors,  # M9: Store colors at top level for easy access
                }
                continue

        # Original logic for multi-row format
        for _, row in group.iterrows():
            side_raw = str(row.get('side', '') or '').strip()
            side_key = side_raw.lower()

@@ -316,7 +425,7 @@ def compute_color_source_matrix(card_library: Dict[str, dict], full_df) -> Dict[
    matrix: Dict[str, Dict[str, int]] = {}
    lookup = {}
    if full_df is not None and not getattr(full_df, 'empty', True) and 'name' in full_df.columns:
        for _, r in full_df.iterrows():  # type: ignore[attr-defined]
        for _, r in full_df.iterrows():
            nm = str(r.get('name', ''))
            if nm and nm not in lookup:
                lookup[nm] = r

@@ -332,8 +441,13 @@ def compute_color_source_matrix(card_library: Dict[str, dict], full_df) -> Dict[
        if hasattr(row, 'get'):
            row_type_raw = row.get('type', row.get('type_line', '')) or ''
            tline_full = str(row_type_raw).lower()
        # M9: Check backType for MDFC land detection
        back_type_raw = ''
        if hasattr(row, 'get'):
            back_type_raw = row.get('backType', '') or ''
        back_type = str(back_type_raw).lower()
        # Land or permanent that could produce mana via text
        is_land = ('land' in entry_type) or ('land' in tline_full)
        is_land = ('land' in entry_type) or ('land' in tline_full) or ('land' in back_type)
        base_is_land = is_land
        text_field_raw = ''
        if hasattr(row, 'get'):

@@ -363,7 +477,8 @@ def compute_color_source_matrix(card_library: Dict[str, dict], full_df) -> Dict[
            if face_types or face_texts:
                is_land = True
        text_field = text_field_raw.lower().replace('\n', ' ')
        # Skip obvious non-permanents (rituals etc.)
        # Skip obvious non-permanents (rituals etc.) - but NOT if any face is a land
        # M9: If is_land is True (from backType check), we keep it regardless of front face type
        if (not is_land) and ('instant' in entry_type or 'sorcery' in entry_type or 'instant' in tline_full or 'sorcery' in tline_full):
            continue
        # Keep only candidates that are lands OR whose text indicates mana production

@@ -437,6 +552,12 @@ def compute_color_source_matrix(card_library: Dict[str, dict], full_df) -> Dict[
            colors['_dfc_land'] = True
            if not (base_is_land or dfc_entry.get('front_is_land')):
                colors['_dfc_counts_as_extra'] = True
            # M9: Extract colors from DFC face metadata (back face land colors)
            dfc_colors = dfc_entry.get('colors', [])
            if dfc_colors:
                for color in dfc_colors:
                    if color in colors:
                        colors[color] = 1
        produces_any_color = any(colors[c] for c in ('W', 'U', 'B', 'R', 'G', 'C'))
        if produces_any_color or colors.get('_dfc_land'):
            matrix[name] = colors

@@ -729,7 +850,7 @@ def select_top_land_candidates(df, already: set[str], basics: set[str], top_n: i
    out: list[tuple[int,str,str,str]] = []
    if df is None or getattr(df, 'empty', True):
        return out
    for _, row in df.iterrows():  # type: ignore[attr-defined]
    for _, row in df.iterrows():
        try:
            name = str(row.get('name',''))
            if not name or name in already or name in basics:

@@ -993,7 +1114,7 @@ def prefer_owned_first(df, owned_names_lower: set[str], name_col: str = 'name'):
# ---------------------------------------------------------------------------
# Tag-driven land suggestion helpers
# ---------------------------------------------------------------------------
def build_tag_driven_suggestions(builder) -> list[dict]:  # type: ignore[override]
def build_tag_driven_suggestions(builder) -> list[dict]:
    """Return a list of suggestion dicts based on selected commander tags.

    Each dict fields:

@@ -1081,7 +1202,7 @@ def color_balance_addition_candidates(builder, target_color: str, combined_df) -
        return []
    existing = set(builder.card_library.keys())
    out: list[tuple[str, int]] = []
    for _, row in combined_df.iterrows():  # type: ignore[attr-defined]
    for _, row in combined_df.iterrows():
        name = str(row.get('name', ''))
        if not name or name in existing or any(name == o[0] for o in out):
            continue
@@ -88,12 +88,12 @@ def _candidate_pool_for_role(builder, role: str) -> List[Tuple[str, dict]]:
    # Sort by edhrecRank then manaValue
    try:
        from . import builder_utils as bu
        sorted_df = bu.sort_by_priority(pool, ["edhrecRank", "manaValue"])  # type: ignore[attr-defined]
        sorted_df = bu.sort_by_priority(pool, ["edhrecRank", "manaValue"])
        # Prefer-owned bias
        if getattr(builder, "prefer_owned", False):
            owned = getattr(builder, "owned_card_names", None)
            if owned:
                sorted_df = bu.prefer_owned_first(sorted_df, {str(n).lower() for n in owned})  # type: ignore[attr-defined]
                sorted_df = bu.prefer_owned_first(sorted_df, {str(n).lower() for n in owned})
    except Exception:
        sorted_df = pool

@@ -363,7 +363,7 @@ def enforce_bracket_compliance(builder, mode: str = "prompt") -> Dict:
            break
    # Rank candidates: break the most combos first; break ties by worst desirability
    cand_names = list(freq.keys())
    cand_names.sort(key=lambda nm: (-int(freq.get(nm, 0)), _score(nm)), reverse=False)  # type: ignore[arg-type]
    cand_names.sort(key=lambda nm: (-int(freq.get(nm, 0)), _score(nm)), reverse=False)
    removed_any = False
    for nm in cand_names:
        if nm in blocked:
@@ -17,7 +17,7 @@ from logging_util import get_logger
logger = get_logger(__name__)

try:  # Optional pandas import for type checking without heavy dependency at runtime.
    import pandas as _pd  # type: ignore
    import pandas as _pd
except Exception:  # pragma: no cover - tests provide DataFrame-like objects.
    _pd = None  # type: ignore

@@ -267,7 +267,7 @@ def _find_commander_row(df: Any, name: str | None):
    if not target:
        return None

    if _pd is not None and isinstance(df, _pd.DataFrame):  # type: ignore
    if _pd is not None and isinstance(df, _pd.DataFrame):
        columns = [col for col in ("name", "faceName") if col in df.columns]
        for col in columns:
            series = df[col].astype(str).str.casefold()

@@ -363,7 +363,14 @@ def _normalize_color_identity(value: Any) -> tuple[str, ...]:
def _normalize_string_sequence(value: Any) -> tuple[str, ...]:
    if value is None:
        return tuple()
    if isinstance(value, (list, tuple, set)):
    # Handle numpy arrays, lists, tuples, sets, and other sequences
    try:
        import numpy as np
        is_numpy = isinstance(value, np.ndarray)
    except ImportError:
        is_numpy = False

    if isinstance(value, (list, tuple, set)) or is_numpy:
        items = list(value)
    else:
        text = _safe_str(value)
@@ -25,11 +25,11 @@ No behavior change intended.

# Attempt to use a fast fuzzy library; fall back gracefully
try:
    from rapidfuzz import process as rf_process, fuzz as rf_fuzz  # type: ignore
    from rapidfuzz import process as rf_process, fuzz as rf_fuzz
    _FUZZ_BACKEND = "rapidfuzz"
except ImportError:  # pragma: no cover - environment dependent
    try:
        from fuzzywuzzy import process as fw_process, fuzz as fw_fuzz  # type: ignore
        from fuzzywuzzy import process as fw_process, fuzz as fw_fuzz
        _FUZZ_BACKEND = "fuzzywuzzy"
    except ImportError:  # pragma: no cover
        _FUZZ_BACKEND = "difflib"
|
||||
|
|
|
|||
|
|
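The import chain resolves the fastest available backend: rapidfuzz, then fuzzywuzzy, then stdlib difflib. A sketch of how a caller might dispatch on the resolved flag (the dispatch shape is an assumption; only the fallback chain appears in the diff):

    def best_match(query: str, choices: list[str]) -> str | None:
        if _FUZZ_BACKEND == "rapidfuzz":
            hit = rf_process.extractOne(query, choices, scorer=rf_fuzz.WRatio)
            return hit[0] if hit else None
        if _FUZZ_BACKEND == "fuzzywuzzy":
            hit = fw_process.extractOne(query, choices, scorer=fw_fuzz.WRatio)
            return hit[0] if hit else None
        import difflib
        hits = difflib.get_close_matches(query, choices, n=1)
        return hits[0] if hits else None
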
@@ -68,7 +68,7 @@ class CommanderSelectionMixin:
             out_words[0] = out_words[0][:1].upper() + out_words[0][1:]
         return ' '.join(out_words)

-    def choose_commander(self) -> str:  # type: ignore[override]
+    def choose_commander(self) -> str:
         df = self.load_commander_data()
         names = df["name"].tolist()
         while True:

@@ -113,7 +113,7 @@ class CommanderSelectionMixin:
                 continue
             query = self._normalize_commander_query(choice)  # treat as new (normalized) query

-    def _present_commander_and_confirm(self, df: pd.DataFrame, name: str) -> bool:  # type: ignore[override]
+    def _present_commander_and_confirm(self, df: pd.DataFrame, name: str) -> bool:
         row = df[df["name"] == name].iloc[0]
         pretty = self._format_commander_pretty(row)
         self.output_func("\n" + pretty)

@@ -126,7 +126,7 @@ class CommanderSelectionMixin:
                 return False
             self.output_func("Please enter y or n.")

-    def _apply_commander_selection(self, row: pd.Series):  # type: ignore[override]
+    def _apply_commander_selection(self, row: pd.Series):
         self.commander_name = row["name"]
         self.commander_row = row
         tags_value = row.get("themeTags", [])

@@ -136,7 +136,7 @@ class CommanderSelectionMixin:
     # ---------------------------
     # Tag Prioritization
     # ---------------------------
-    def select_commander_tags(self) -> List[str]:  # type: ignore[override]
+    def select_commander_tags(self) -> List[str]:
         if not self.commander_name:
             self.output_func("No commander chosen yet. Selecting commander first...")
             self.choose_commander()

@@ -173,7 +173,7 @@ class CommanderSelectionMixin:
         self._update_commander_dict_with_selected_tags()
         return self.selected_tags

-    def _prompt_tag_choice(self, available: List[str], prompt_text: str, allow_stop: bool) -> Optional[str]:  # type: ignore[override]
+    def _prompt_tag_choice(self, available: List[str], prompt_text: str, allow_stop: bool) -> Optional[str]:
         while True:
             self.output_func("\nCurrent options:")
             for i, t in enumerate(available, 1):

@@ -192,7 +192,7 @@ class CommanderSelectionMixin:
                 return matches[0]
             self.output_func("Invalid selection. Try again.")

-    def _update_commander_dict_with_selected_tags(self):  # type: ignore[override]
+    def _update_commander_dict_with_selected_tags(self):
         if not self.commander_dict and self.commander_row is not None:
             self._initialize_commander_dict(self.commander_row)
         if not self.commander_dict:

@@ -205,7 +205,7 @@ class CommanderSelectionMixin:
     # ---------------------------
     # Power Bracket Selection
     # ---------------------------
-    def select_power_bracket(self) -> BracketDefinition:  # type: ignore[override]
+    def select_power_bracket(self) -> BracketDefinition:
         if self.bracket_definition:
             return self.bracket_definition
         self.output_func("\nChoose Deck Power Bracket:")

@@ -229,14 +229,14 @@ class CommanderSelectionMixin:
                 return match
             self.output_func("Invalid input. Type 1-5 or 'info'.")

-    def _print_bracket_details(self):  # type: ignore[override]
+    def _print_bracket_details(self):
         self.output_func("\nBracket Details:")
         for bd in BRACKET_DEFINITIONS:
             self.output_func(f"\n[{bd.level}] {bd.name}")
             self.output_func(bd.long_desc)
             self.output_func(self._format_limits(bd.limits))

-    def _print_selected_bracket_summary(self):  # type: ignore[override]
+    def _print_selected_bracket_summary(self):
         self.output_func("\nBracket Constraints:")
         if self.bracket_limits:
             self.output_func(self._format_limits(self.bracket_limits))

@@ -22,7 +22,7 @@ Expected attributes / methods on the host DeckBuilder:


 class LandBasicsMixin:
-    def add_basic_lands(self):  # type: ignore[override]
+    def add_basic_lands(self):
         """Add basic (or snow basic) lands based on color identity.

         Logic:

@@ -71,8 +71,8 @@ class LandBasicsMixin:
         basic_min: Optional[int] = None
         land_total: Optional[int] = None
         if hasattr(self, 'ideal_counts') and getattr(self, 'ideal_counts'):
-            basic_min = self.ideal_counts.get('basic_lands')  # type: ignore[attr-defined]
-            land_total = self.ideal_counts.get('lands')  # type: ignore[attr-defined]
+            basic_min = self.ideal_counts.get('basic_lands')
+            land_total = self.ideal_counts.get('lands')
         if basic_min is None:
             basic_min = getattr(bc, 'DEFAULT_BASIC_LAND_COUNT', 20)
         if land_total is None:

@@ -136,7 +136,7 @@ class LandBasicsMixin:
             self.output_func(f" {name.ljust(width)} : {cnt}")
         self.output_func(f" Total Basics : {sum(allocation.values())} (Target {target_basics}, Min {basic_min})")

-    def run_land_step1(self):  # type: ignore[override]
+    def run_land_step1(self):
         """Public wrapper to execute land building step 1 (basics)."""
         self.add_basic_lands()
         try:

@@ -21,7 +21,7 @@ Host DeckBuilder must provide:
 """

 class LandDualsMixin:
-    def add_dual_lands(self, requested_count: int | None = None):  # type: ignore[override]
+    def add_dual_lands(self, requested_count: int | None = None):
         """Add two-color 'typed' dual lands based on color identity."""
         if not getattr(self, 'files_to_load', []):
             try:

@@ -117,10 +117,10 @@ class LandDualsMixin:
             pair_buckets[key] = names
         min_basic_cfg = getattr(bc, 'DEFAULT_BASIC_LAND_COUNT', 20)
         if getattr(self, 'ideal_counts', None):
-            min_basic_cfg = self.ideal_counts.get('basic_lands', min_basic_cfg)  # type: ignore[attr-defined]
-        basic_floor = self._basic_floor(min_basic_cfg)  # type: ignore[attr-defined]
+            min_basic_cfg = self.ideal_counts.get('basic_lands', min_basic_cfg)
+        basic_floor = self._basic_floor(min_basic_cfg)
         default_dual_target = getattr(bc, 'DUAL_LAND_DEFAULT_COUNT', 6)
-        remaining_capacity = max(0, land_target - self._current_land_count())  # type: ignore[attr-defined]
+        remaining_capacity = max(0, land_target - self._current_land_count())
         effective_default = min(default_dual_target, remaining_capacity if remaining_capacity>0 else len(pool), len(pool))
         desired = effective_default if requested_count is None else max(0, int(requested_count))
         if desired == 0:

@@ -129,14 +129,14 @@ class LandDualsMixin:
         if remaining_capacity == 0 and desired > 0:
             slots_needed = desired
             freed_slots = 0
-            while freed_slots < slots_needed and self._count_basic_lands() > basic_floor:  # type: ignore[attr-defined]
-                target_basic = self._choose_basic_to_trim()  # type: ignore[attr-defined]
-                if not target_basic or not self._decrement_card(target_basic):  # type: ignore[attr-defined]
+            while freed_slots < slots_needed and self._count_basic_lands() > basic_floor:
+                target_basic = self._choose_basic_to_trim()
+                if not target_basic or not self._decrement_card(target_basic):
                     break
                 freed_slots += 1
             if freed_slots == 0:
                 desired = 0
-        remaining_capacity = max(0, land_target - self._current_land_count())  # type: ignore[attr-defined]
+        remaining_capacity = max(0, land_target - self._current_land_count())
         desired = min(desired, remaining_capacity, len(pool))
         if desired <= 0:
             self.output_func("Dual Lands: No capacity after trimming; skipping.")

@@ -146,7 +146,7 @@ class LandDualsMixin:
         rng = getattr(self, 'rng', None)
         try:
             if rng:
-                rng.shuffle(bucket_keys)  # type: ignore
+                rng.shuffle(bucket_keys)
             else:
                 random.shuffle(bucket_keys)
         except Exception:

@@ -171,7 +171,7 @@ class LandDualsMixin:
                 break
         added: List[str] = []
         for name in chosen:
-            if self._current_land_count() >= land_target:  # type: ignore[attr-defined]
+            if self._current_land_count() >= land_target:
                 break
             # Determine sub_role as concatenated color pair for traceability
             try:

@@ -198,7 +198,7 @@ class LandDualsMixin:
                 role='dual',
                 sub_role=sub_role,
                 added_by='lands_step5'
-            )  # type: ignore[attr-defined]
+            )
             added.append(name)
         self.output_func("\nDual Lands Added (Step 5):")
         if not added:

@@ -207,11 +207,11 @@ class LandDualsMixin:
         width = max(len(n) for n in added)
         for n in added:
             self.output_func(f" {n.ljust(width)} : 1")
-        self.output_func(f" Land Count Now : {self._current_land_count()} / {land_target}")  # type: ignore[attr-defined]
+        self.output_func(f" Land Count Now : {self._current_land_count()} / {land_target}")

-    def run_land_step5(self, requested_count: int | None = None):  # type: ignore[override]
+    def run_land_step5(self, requested_count: int | None = None):
         self.add_dual_lands(requested_count=requested_count)
-        self._enforce_land_cap(step_label="Duals (Step 5)")  # type: ignore[attr-defined]
+        self._enforce_land_cap(step_label="Duals (Step 5)")
         try:
             from .. import builder_utils as _bu
             _bu.export_current_land_pool(self, '5')

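A worked example of the capacity math in add_dual_lands, with assumed inputs:

    land_target = 38          # ideal_counts['lands']
    current = 38              # already at target before Step 5
    default_dual_target = 6   # bc.DUAL_LAND_DEFAULT_COUNT
    pool_size = 14            # typed duals legal for this color pair

    remaining_capacity = max(0, land_target - current)  # 0
    effective_default = min(default_dual_target,
                            remaining_capacity if remaining_capacity > 0 else pool_size,
                            pool_size)                  # 6
    # remaining_capacity is 0 while desired > 0, so the trimming loop above
    # frees one slot per dual by cutting basics, never below the basic floor.
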
@@ -19,7 +19,7 @@ Host DeckBuilder must supply:
 """

 class LandFetchMixin:
-    def add_fetch_lands(self, requested_count: int | None = None):  # type: ignore[override]
+    def add_fetch_lands(self, requested_count: int | None = None):
         """Add fetch lands (color-specific + generic) respecting land target."""
         if not getattr(self, 'files_to_load', []):
             try:

@@ -28,8 +28,8 @@ class LandFetchMixin:
             except Exception as e:  # pragma: no cover - defensive
                 self.output_func(f"Cannot add fetch lands until color identity resolved: {e}")
                 return
-        land_target = (getattr(self, 'ideal_counts', {}).get('lands') if getattr(self, 'ideal_counts', None) else None) or getattr(bc, 'DEFAULT_LAND_COUNT', 35)  # type: ignore[attr-defined]
-        current = self._current_land_count()  # type: ignore[attr-defined]
+        land_target = (getattr(self, 'ideal_counts', {}).get('lands') if getattr(self, 'ideal_counts', None) else None) or getattr(bc, 'DEFAULT_LAND_COUNT', 35)
+        current = self._current_land_count()
         color_order = [c for c in getattr(self, 'color_identity', []) if c in ['W','U','B','R','G']]
         color_map = getattr(bc, 'COLOR_TO_FETCH_LANDS', {})
         candidates: List[str] = []

@@ -56,7 +56,7 @@ class LandFetchMixin:
             self.output_func("\nAdd Fetch Lands (Step 4):")
             self.output_func("Fetch lands help fix colors & enable landfall / graveyard synergies.")
             prompt = f"Enter desired number of fetch lands (default: {effective_default}):"
-            desired = self._prompt_int_with_default(prompt + ' ', effective_default, minimum=0, maximum=20)  # type: ignore[attr-defined]
+            desired = self._prompt_int_with_default(prompt + ' ', effective_default, minimum=0, maximum=20)
         else:
             desired = max(0, int(requested_count))
         if desired > remaining_fetch_slots:

@@ -70,20 +70,20 @@ class LandFetchMixin:
         if remaining_capacity == 0 and desired > 0:
             min_basic_cfg = getattr(bc, 'DEFAULT_BASIC_LAND_COUNT', 20)
             if getattr(self, 'ideal_counts', None):
-                min_basic_cfg = self.ideal_counts.get('basic_lands', min_basic_cfg)  # type: ignore[attr-defined]
-            floor_basics = self._basic_floor(min_basic_cfg)  # type: ignore[attr-defined]
+                min_basic_cfg = self.ideal_counts.get('basic_lands', min_basic_cfg)
+            floor_basics = self._basic_floor(min_basic_cfg)
             slots_needed = desired
-            while slots_needed > 0 and self._count_basic_lands() > floor_basics:  # type: ignore[attr-defined]
-                target_basic = self._choose_basic_to_trim()  # type: ignore[attr-defined]
-                if not target_basic or not self._decrement_card(target_basic):  # type: ignore[attr-defined]
+            while slots_needed > 0 and self._count_basic_lands() > floor_basics:
+                target_basic = self._choose_basic_to_trim()
+                if not target_basic or not self._decrement_card(target_basic):
                     break
                 slots_needed -= 1
-                remaining_capacity = max(0, land_target - self._current_land_count())  # type: ignore[attr-defined]
+                remaining_capacity = max(0, land_target - self._current_land_count())
                 if remaining_capacity > 0 and slots_needed == 0:
                     break
             if slots_needed > 0 and remaining_capacity == 0:
                 desired -= slots_needed
-        remaining_capacity = max(0, land_target - self._current_land_count())  # type: ignore[attr-defined]
+        remaining_capacity = max(0, land_target - self._current_land_count())
         desired = min(desired, remaining_capacity, len(candidates), remaining_fetch_slots)
         if desired <= 0:
             self.output_func("Fetch Lands: No capacity (after trimming) or desired reduced to 0; skipping.")

@@ -101,7 +101,7 @@ class LandFetchMixin:
             if k >= len(pool):
                 return pool.copy()
             try:
-                return (rng.sample if rng else random.sample)(pool, k)  # type: ignore
+                return (rng.sample if rng else random.sample)(pool, k)
             except Exception:
                 return pool[:k]
         need = desired

@@ -117,7 +117,7 @@ class LandFetchMixin:

         added: List[str] = []
         for nm in chosen:
-            if self._current_land_count() >= land_target:  # type: ignore[attr-defined]
+            if self._current_land_count() >= land_target:
                 break
             note = 'generic' if nm in generic_list else 'color-specific'
             self.add_card(

@@ -126,11 +126,11 @@ class LandFetchMixin:
                 role='fetch',
                 sub_role=note,
                 added_by='lands_step4'
-            )  # type: ignore[attr-defined]
+            )
             added.append(nm)
         # Record actual number of fetch lands added for export/replay context
         try:
-            setattr(self, 'fetch_count', len(added))  # type: ignore[attr-defined]
+            setattr(self, 'fetch_count', len(added))
         except Exception:
             pass
         self.output_func("\nFetch Lands Added (Step 4):")

@@ -141,9 +141,9 @@ class LandFetchMixin:
         for n in added:
             note = 'generic' if n in generic_list else 'color-specific'
             self.output_func(f" {n.ljust(width)} : 1 ({note})")
-        self.output_func(f" Land Count Now : {self._current_land_count()} / {land_target}")  # type: ignore[attr-defined]
+        self.output_func(f" Land Count Now : {self._current_land_count()} / {land_target}")

-    def run_land_step4(self, requested_count: int | None = None):  # type: ignore[override]
+    def run_land_step4(self, requested_count: int | None = None):
         """Public wrapper to add fetch lands.

         If ideal_counts['fetch_lands'] is set, it will be used to bypass the prompt in both CLI and web builds.

@@ -155,7 +155,7 @@ class LandFetchMixin:
         except Exception:
             desired = requested_count
         self.add_fetch_lands(requested_count=desired)
-        self._enforce_land_cap(step_label="Fetch (Step 4)")  # type: ignore[attr-defined]
+        self._enforce_land_cap(step_label="Fetch (Step 4)")
         try:
             from .. import builder_utils as _bu
             _bu.export_current_land_pool(self, '4')

@@ -20,7 +20,7 @@ Host DeckBuilder must provide:
 """

 class LandKindredMixin:
-    def add_kindred_lands(self):  # type: ignore[override]
+    def add_kindred_lands(self):
         """Add kindred-oriented lands ONLY if a selected tag includes 'Kindred' or 'Tribal'.

         Baseline inclusions on kindred focus:

@@ -41,32 +41,32 @@ class LandKindredMixin:
             self.output_func("Kindred Lands: No selected kindred/tribal tag; skipping.")
             return
         if hasattr(self, 'ideal_counts') and getattr(self, 'ideal_counts'):
-            land_target = self.ideal_counts.get('lands', getattr(bc, 'DEFAULT_LAND_COUNT', 35))  # type: ignore[attr-defined]
+            land_target = self.ideal_counts.get('lands', getattr(bc, 'DEFAULT_LAND_COUNT', 35))
         else:
             land_target = getattr(bc, 'DEFAULT_LAND_COUNT', 35)
         min_basic_cfg = getattr(bc, 'DEFAULT_BASIC_LAND_COUNT', 20)
         if hasattr(self, 'ideal_counts') and getattr(self, 'ideal_counts'):
-            min_basic_cfg = self.ideal_counts.get('basic_lands', min_basic_cfg)  # type: ignore[attr-defined]
-        basic_floor = self._basic_floor(min_basic_cfg)  # type: ignore[attr-defined]
+            min_basic_cfg = self.ideal_counts.get('basic_lands', min_basic_cfg)
+        basic_floor = self._basic_floor(min_basic_cfg)

         def ensure_capacity() -> bool:
-            if self._current_land_count() < land_target:  # type: ignore[attr-defined]
+            if self._current_land_count() < land_target:
                 return True
-            if self._count_basic_lands() <= basic_floor:  # type: ignore[attr-defined]
+            if self._count_basic_lands() <= basic_floor:
                 return False
-            target_basic = self._choose_basic_to_trim()  # type: ignore[attr-defined]
+            target_basic = self._choose_basic_to_trim()
             if not target_basic:
                 return False
-            if not self._decrement_card(target_basic):  # type: ignore[attr-defined]
+            if not self._decrement_card(target_basic):
                 return False
-            return self._current_land_count() < land_target  # type: ignore[attr-defined]
+            return self._current_land_count() < land_target

         colors = getattr(self, 'color_identity', []) or []
         added: List[str] = []
         reasons: Dict[str, str] = {}

         def try_add(name: str, reason: str):
-            if name in self.card_library:  # type: ignore[attr-defined]
+            if name in self.card_library:
                 return
             if not ensure_capacity():
                 return

@@ -77,7 +77,7 @@ class LandKindredMixin:
                 sub_role='baseline' if reason.startswith('kindred focus') else 'tribe-specific',
                 added_by='lands_step3',
                 trigger_tag='Kindred/Tribal'
-            )  # type: ignore[attr-defined]
+            )
             added.append(name)
             reasons[name] = reason

@@ -105,14 +105,14 @@ class LandKindredMixin:
         if snapshot is not None and not snapshot.empty and tribe_terms:
             dynamic_limit = 5
             for tribe in sorted(tribe_terms):
-                if self._current_land_count() >= land_target or dynamic_limit <= 0:  # type: ignore[attr-defined]
+                if self._current_land_count() >= land_target or dynamic_limit <= 0:
                     break
                 tribe_lower = tribe.lower()
                 matches: List[str] = []
                 for _, row in snapshot.iterrows():
                     try:
                         nm = str(row.get('name', ''))
-                        if not nm or nm in self.card_library:  # type: ignore[attr-defined]
+                        if not nm or nm in self.card_library:
                             continue
                         tline = str(row.get('type', row.get('type_line', ''))).lower()
                         if 'land' not in tline:

@@ -125,7 +125,7 @@ class LandKindredMixin:
                     except Exception:
                         continue
                 for nm in matches[:2]:
-                    if self._current_land_count() >= land_target or dynamic_limit <= 0:  # type: ignore[attr-defined]
+                    if self._current_land_count() >= land_target or dynamic_limit <= 0:
                         break
                     if nm in added or nm in getattr(bc, 'BASIC_LANDS', []):
                         continue

@@ -139,12 +139,12 @@ class LandKindredMixin:
         width = max(len(n) for n in added)
         for n in added:
             self.output_func(f" {n.ljust(width)} : 1 ({reasons.get(n,'')})")
-        self.output_func(f" Land Count Now : {self._current_land_count()} / {land_target}")  # type: ignore[attr-defined]
+        self.output_func(f" Land Count Now : {self._current_land_count()} / {land_target}")

-    def run_land_step3(self):  # type: ignore[override]
+    def run_land_step3(self):
         """Public wrapper to add kindred-focused lands."""
         self.add_kindred_lands()
-        self._enforce_land_cap(step_label="Kindred (Step 3)")  # type: ignore[attr-defined]
+        self._enforce_land_cap(step_label="Kindred (Step 3)")
         try:
             from .. import builder_utils as _bu
             _bu.export_current_land_pool(self, '3')

@@ -19,7 +19,7 @@ class LandMiscUtilityMixin:
     - Diagnostics & CSV exports
     """

-    def add_misc_utility_lands(self, requested_count: Optional[int] = None):  # type: ignore[override]
+    def add_misc_utility_lands(self, requested_count: Optional[int] = None):
         # --- Initialization & candidate collection ---
         if not getattr(self, 'files_to_load', None):
             try:

@@ -293,7 +293,7 @@ class LandMiscUtilityMixin:
         if getattr(self, 'show_diagnostics', False) and filtered_out:
             self.output_func(f" (Mono-color excluded candidates: {', '.join(filtered_out)})")

-    def run_land_step7(self, requested_count: Optional[int] = None):  # type: ignore[override]
+    def run_land_step7(self, requested_count: Optional[int] = None):
         self.add_misc_utility_lands(requested_count=requested_count)
         self._enforce_land_cap(step_label="Utility (Step 7)")
         self._build_tag_driven_land_suggestions()

@@ -305,12 +305,12 @@ class LandMiscUtilityMixin:
             pass

     # ---- Tag-driven suggestion helpers (used after Step 7) ----
-    def _build_tag_driven_land_suggestions(self):  # type: ignore[override]
+    def _build_tag_driven_land_suggestions(self):
         suggestions = bu.build_tag_driven_suggestions(self)
         if suggestions:
             self.suggested_lands_queue.extend(suggestions)

-    def _apply_land_suggestions_if_room(self):  # type: ignore[override]
+    def _apply_land_suggestions_if_room(self):
         if not self.suggested_lands_queue:
             return
         land_target = getattr(self, 'ideal_counts', {}).get('lands', getattr(bc, 'DEFAULT_LAND_COUNT', 35)) if getattr(self, 'ideal_counts', None) else getattr(bc, 'DEFAULT_LAND_COUNT', 35)

@@ -12,7 +12,7 @@ class LandOptimizationMixin:
     Provides optimize_tapped_lands and run_land_step8 (moved from monolithic builder).
     """

-    def optimize_tapped_lands(self):  # type: ignore[override]
+    def optimize_tapped_lands(self):
         df = getattr(self, '_combined_cards_df', None)
         if df is None or df.empty:
             return

@@ -146,7 +146,7 @@ class LandOptimizationMixin:
                 new_tapped += 1
         self.output_func(f" Tapped Lands After : {new_tapped} (threshold {threshold})")

-    def run_land_step8(self):  # type: ignore[override]
+    def run_land_step8(self):
         self.optimize_tapped_lands()
         self._enforce_land_cap(step_label="Tapped Opt (Step 8)")
         if self.color_source_matrix_baseline is None:

@@ -27,10 +27,10 @@ class LandStaplesMixin:
     # ---------------------------
     # Land Building Step 2: Staple Nonbasic Lands (NO Kindred yet)
     # ---------------------------
-    def _current_land_count(self) -> int:  # type: ignore[override]
+    def _current_land_count(self) -> int:
         """Return total number of land cards currently in the library (counts duplicates)."""
         total = 0
-        for name, entry in self.card_library.items():  # type: ignore[attr-defined]
+        for name, entry in self.card_library.items():
             ctype = entry.get('Card Type', '')
             if ctype and 'land' in ctype.lower():
                 total += entry.get('Count', 1)

@@ -47,7 +47,7 @@ class LandStaplesMixin:
                 continue
         return total

-    def add_staple_lands(self):  # type: ignore[override]
+    def add_staple_lands(self):
         """Add generic staple lands defined in STAPLE_LAND_CONDITIONS (excluding kindred lands).

         Respects total land target (ideal_counts['lands']). Skips additions once target reached.

@@ -62,25 +62,25 @@ class LandStaplesMixin:
             return
         land_target = None
         if hasattr(self, 'ideal_counts') and getattr(self, 'ideal_counts'):
-            land_target = self.ideal_counts.get('lands')  # type: ignore[attr-defined]
+            land_target = self.ideal_counts.get('lands')
         if land_target is None:
             land_target = getattr(bc, 'DEFAULT_LAND_COUNT', 35)
         min_basic_cfg = getattr(bc, 'DEFAULT_BASIC_LAND_COUNT', 20)
         if hasattr(self, 'ideal_counts') and getattr(self, 'ideal_counts'):
-            min_basic_cfg = self.ideal_counts.get('basic_lands', min_basic_cfg)  # type: ignore[attr-defined]
-        basic_floor = self._basic_floor(min_basic_cfg)  # type: ignore[attr-defined]
+            min_basic_cfg = self.ideal_counts.get('basic_lands', min_basic_cfg)
+        basic_floor = self._basic_floor(min_basic_cfg)

         def ensure_capacity() -> bool:
-            if self._current_land_count() < land_target:  # type: ignore[attr-defined]
+            if self._current_land_count() < land_target:
                 return True
-            if self._count_basic_lands() <= basic_floor:  # type: ignore[attr-defined]
+            if self._count_basic_lands() <= basic_floor:
                 return False
-            target_basic = self._choose_basic_to_trim()  # type: ignore[attr-defined]
+            target_basic = self._choose_basic_to_trim()
             if not target_basic:
                 return False
-            if not self._decrement_card(target_basic):  # type: ignore[attr-defined]
+            if not self._decrement_card(target_basic):
                 return False
-            return self._current_land_count() < land_target  # type: ignore[attr-defined]
+            return self._current_land_count() < land_target

         commander_tags_all = set(getattr(self, 'commander_tags', []) or []) | set(getattr(self, 'selected_tags', []) or [])
         colors = getattr(self, 'color_identity', []) or []

@@ -102,7 +102,7 @@ class LandStaplesMixin:
             if not ensure_capacity():
                 self.output_func("Staple Lands: Cannot free capacity without violating basic floor; stopping additions.")
                 break
-            if land_name in self.card_library:  # type: ignore[attr-defined]
+            if land_name in self.card_library:
                 continue
             try:
                 include = cond(list(commander_tags_all), colors, commander_power)

@@ -115,7 +115,7 @@ class LandStaplesMixin:
                     role='staple',
                     sub_role='generic-staple',
                     added_by='lands_step2'
-                )  # type: ignore[attr-defined]
+                )
                 added.append(land_name)
                 if land_name == 'Command Tower':
                     reasons[land_name] = f"multi-color ({len(colors)} colors)"

@@ -137,12 +137,12 @@ class LandStaplesMixin:
         for n in added:
             reason = reasons.get(n, '')
             self.output_func(f" {n.ljust(width)} : 1 {('(' + reason + ')') if reason else ''}")
-        self.output_func(f" Land Count Now : {self._current_land_count()} / {land_target}")  # type: ignore[attr-defined]
+        self.output_func(f" Land Count Now : {self._current_land_count()} / {land_target}")

-    def run_land_step2(self):  # type: ignore[override]
+    def run_land_step2(self):
         """Public wrapper for adding generic staple nonbasic lands (excluding kindred)."""
         self.add_staple_lands()
-        self._enforce_land_cap(step_label="Staples (Step 2)")  # type: ignore[attr-defined]
+        self._enforce_land_cap(step_label="Staples (Step 2)")
         try:
             from .. import builder_utils as _bu
             _bu.export_current_land_pool(self, '2')

@@ -59,7 +59,7 @@ class LandTripleMixin:
             'forest': 'G',
         }

-        for _, row in df.iterrows():  # type: ignore
+        for _, row in df.iterrows():
             try:
                 name = str(row.get('name',''))
                 if not name or name in self.card_library:

@@ -33,7 +33,7 @@ class CreatureAdditionMixin:
             self.output_func("Card pool missing 'type' column; cannot add creatures.")
             return
         try:
-            context = self.get_theme_context()  # type: ignore[attr-defined]
+            context = self.get_theme_context()
         except Exception:
             context = None
         if context is None or not getattr(context, 'ordered_targets', []):

@@ -480,7 +480,7 @@ class CreatureAdditionMixin:
             drop_idx = tags_series.apply(lambda lst, nd=needles: any(any(n in t for n in nd) for t in lst))
             mask_keep = [mk and (not di) for mk, di in zip(mask_keep, drop_idx.tolist())]
             try:
-                import pandas as _pd  # type: ignore
+                import pandas as _pd
                 mask_keep = _pd.Series(mask_keep, index=df.index)
             except Exception:
                 pass

@@ -78,7 +78,7 @@ class SpellAdditionMixin:
             # Combine into keep mask
             mask_keep = [mk and (not di) for mk, di in zip(mask_keep, drop_idx.tolist())]
             try:
-                import pandas as _pd  # type: ignore
+                import pandas as _pd
                 mask_keep = _pd.Series(mask_keep, index=df.index)
             except Exception:
                 pass

@@ -742,7 +742,7 @@ class SpellAdditionMixin:
         if df is None or df.empty or 'type' not in df.columns:
             return
         try:
-            context = self.get_theme_context()  # type: ignore[attr-defined]
+            context = self.get_theme_context()
         except Exception:
             context = None
         if context is None or not getattr(context, 'ordered_targets', []):

@@ -14,7 +14,7 @@ from ..shared_copy import build_land_headline, dfc_card_note
 logger = logging_util.logging.getLogger(__name__)

 try:
-    from prettytable import PrettyTable  # type: ignore
+    from prettytable import PrettyTable
 except Exception:  # pragma: no cover
     PrettyTable = None  # type: ignore

@@ -176,7 +176,7 @@ class ReportingMixin:
         """
         try:
             # Lazy import to avoid cycles
-            from deck_builder.enforcement import enforce_bracket_compliance  # type: ignore
+            from deck_builder.enforcement import enforce_bracket_compliance
         except Exception:
             self.output_func("Enforcement module unavailable.")
             return {}

@@ -194,7 +194,7 @@ class ReportingMixin:
         if int(total_cards) < 100 and hasattr(self, 'fill_remaining_theme_spells'):
             before = int(total_cards)
             try:
-                self.fill_remaining_theme_spells()  # type: ignore[attr-defined]
+                self.fill_remaining_theme_spells()
             except Exception:
                 pass
             # Recompute after filler

@@ -239,13 +239,13 @@ class ReportingMixin:
                 csv_name = base_stem + ".csv"
                 txt_name = base_stem + ".txt"
                 # Overwrite exports with updated library
-                self.export_decklist_csv(directory='deck_files', filename=csv_name, suppress_output=True)  # type: ignore[attr-defined]
-                self.export_decklist_text(directory='deck_files', filename=txt_name, suppress_output=True)  # type: ignore[attr-defined]
+                self.export_decklist_csv(directory='deck_files', filename=csv_name, suppress_output=True)
+                self.export_decklist_text(directory='deck_files', filename=txt_name, suppress_output=True)
                 # Re-export the JSON config to reflect any changes from enforcement
                 json_name = base_stem + ".json"
-                self.export_run_config_json(directory='config', filename=json_name, suppress_output=True)  # type: ignore[attr-defined]
+                self.export_run_config_json(directory='config', filename=json_name, suppress_output=True)
                 # Recompute and write compliance next to them
-                self.compute_and_print_compliance(base_stem=base_stem)  # type: ignore[attr-defined]
+                self.compute_and_print_compliance(base_stem=base_stem)
                 # Inject enforcement details into the saved compliance JSON for UI transparency
                 comp_path = _os.path.join('deck_files', f"{base_stem}_compliance.json")
                 try:

@@ -259,18 +259,18 @@ class ReportingMixin:
                     pass
             else:
                 # Fall back to default export flow
-                csv_path = self.export_decklist_csv()  # type: ignore[attr-defined]
+                csv_path = self.export_decklist_csv()
                 try:
                     base, _ = _os.path.splitext(csv_path)
                     base_only = _os.path.basename(base)
                 except Exception:
                     base_only = None
-                self.export_decklist_text(filename=(base_only + '.txt') if base_only else None)  # type: ignore[attr-defined]
+                self.export_decklist_text(filename=(base_only + '.txt') if base_only else None)
                 # Re-export JSON config after enforcement changes
                 if base_only:
-                    self.export_run_config_json(directory='config', filename=base_only + '.json', suppress_output=True)  # type: ignore[attr-defined]
+                    self.export_run_config_json(directory='config', filename=base_only + '.json', suppress_output=True)
                 if base_only:
-                    self.compute_and_print_compliance(base_stem=base_only)  # type: ignore[attr-defined]
+                    self.compute_and_print_compliance(base_stem=base_only)
                 # Inject enforcement into written JSON as above
                 try:
                     comp_path = _os.path.join('deck_files', f"{base_only}_compliance.json")

@@ -294,7 +294,7 @@ class ReportingMixin:
         """
         try:
             # Late import to avoid circulars in some environments
-            from deck_builder.brackets_compliance import evaluate_deck  # type: ignore
+            from deck_builder.brackets_compliance import evaluate_deck
         except Exception:
             self.output_func("Bracket compliance module unavailable.")
             return {}

@@ -373,7 +373,7 @@ class ReportingMixin:
         full_df = getattr(self, '_full_cards_df', None)
         combined_df = getattr(self, '_combined_cards_df', None)
         snapshot = full_df if full_df is not None else combined_df
-        row_lookup: Dict[str, any] = {}
+        row_lookup: Dict[str, Any] = {}
         if snapshot is not None and hasattr(snapshot, 'empty') and not snapshot.empty and 'name' in snapshot.columns:
             for _, r in snapshot.iterrows():
                 nm = str(r.get('name'))

@@ -429,7 +429,7 @@ class ReportingMixin:

         # Surface land vs. MDFC counts for CLI users to mirror web summary copy
         try:
-            summary = self.build_deck_summary()  # type: ignore[attr-defined]
+            summary = self.build_deck_summary()
         except Exception:
             summary = None
         if isinstance(summary, dict):

@@ -483,9 +483,9 @@ class ReportingMixin:
         full_df = getattr(self, '_full_cards_df', None)
         combined_df = getattr(self, '_combined_cards_df', None)
         snapshot = full_df if full_df is not None else combined_df
-        row_lookup: Dict[str, any] = {}
+        row_lookup: Dict[str, Any] = {}
         if snapshot is not None and not getattr(snapshot, 'empty', True) and 'name' in snapshot.columns:
-            for _, r in snapshot.iterrows():  # type: ignore[attr-defined]
+            for _, r in snapshot.iterrows():
                 nm = str(r.get('name'))
                 if nm and nm not in row_lookup:
                     row_lookup[nm] = r

@@ -521,7 +521,7 @@ class ReportingMixin:

         builder_utils_module = None
         try:
-            from deck_builder import builder_utils as _builder_utils  # type: ignore
+            from deck_builder import builder_utils as _builder_utils
             builder_utils_module = _builder_utils
             color_matrix = builder_utils_module.compute_color_source_matrix(self.card_library, full_df)
         except Exception:

@@ -543,6 +543,9 @@ class ReportingMixin:
                 mf_info = {}
             faces_meta = list(mf_info.get('faces', [])) if isinstance(mf_info, dict) else []
             layout_val = mf_info.get('layout') if isinstance(mf_info, dict) else None
+            # M9: If no colors found from mana production, try extracting from face metadata
+            if not card_colors and isinstance(mf_info, dict):
+                card_colors = list(mf_info.get('colors', []))
             dfc_land_lookup[name] = {
                 'adds_extra_land': counts_as_extra,
                 'counts_as_land': not counts_as_extra,

@@ -681,13 +684,14 @@ class ReportingMixin:
                 'faces': faces_meta,
                 'layout': layout_val,
             })
-            if adds_extra:
-                dfc_extra_total += copies
+            # M9: Count ALL MDFC lands for land summary
+            dfc_extra_total += copies
         total_sources = sum(source_counts.values())
         traditional_lands = type_counts.get('Land', 0)
+        # M9: dfc_extra_total now contains ALL MDFC lands, not just extras
         land_summary = {
             'traditional': traditional_lands,
-            'dfc_lands': dfc_extra_total,
+            'dfc_lands': dfc_extra_total,  # M9: Count of all MDFC lands
             'with_dfc': traditional_lands + dfc_extra_total,
             'dfc_cards': dfc_details,
             'headline': build_land_headline(traditional_lands, dfc_extra_total, traditional_lands + dfc_extra_total),

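A short worked example of the M9 counting change: every MDFC land now feeds dfc_extra_total, so the headline adds them to traditional lands (the exact headline wording is produced by build_land_headline and is assumed here):

    traditional_lands = 35
    dfc_extra_total = 3   # e.g. three modal double-faced cards with land backs
    with_dfc = traditional_lands + dfc_extra_total  # 38 total land slots
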
@@ -852,7 +856,7 @@ class ReportingMixin:
         full_df = getattr(self, '_full_cards_df', None)
         combined_df = getattr(self, '_combined_cards_df', None)
         snapshot = full_df if full_df is not None else combined_df
-        row_lookup: Dict[str, any] = {}
+        row_lookup: Dict[str, Any] = {}
         if snapshot is not None and not snapshot.empty and 'name' in snapshot.columns:
             for _, r in snapshot.iterrows():
                 nm = str(r.get('name'))

@@ -1124,7 +1128,7 @@ class ReportingMixin:
         full_df = getattr(self, '_full_cards_df', None)
         combined_df = getattr(self, '_combined_cards_df', None)
         snapshot = full_df if full_df is not None else combined_df
-        row_lookup: Dict[str, any] = {}
+        row_lookup: Dict[str, Any] = {}
         if snapshot is not None and not snapshot.empty and 'name' in snapshot.columns:
             for _, r in snapshot.iterrows():
                 nm = str(r.get('name'))

@@ -1132,7 +1136,7 @@ class ReportingMixin:
                     row_lookup[nm] = r

         try:
-            from deck_builder import builder_utils as _builder_utils  # type: ignore
+            from deck_builder import builder_utils as _builder_utils
             color_matrix = _builder_utils.compute_color_source_matrix(self.card_library, full_df)
         except Exception:
             color_matrix = {}

@@ -1383,3 +1387,4 @@ class ReportingMixin:
         """
         # Card library printout suppressed; use CSV and text export for card list.
         pass
+

@@ -885,7 +885,7 @@ def _filter_multi(df: pd.DataFrame, primary: Optional[str], secondary: Optional[
         if index_map is None:
             _ensure_theme_tag_index(current_df)
             index_map = current_df.attrs.get("_ltag_index") or {}
-        return index_map  # type: ignore[return-value]
+        return index_map

     index_map_all = _get_index_map(df)

@@ -1047,7 +1047,7 @@ def _check_constraints(candidate_count: int, constraints: Optional[Dict[str, Any
     if not constraints:
         return
     try:
-        req_min = constraints.get("require_min_candidates")  # type: ignore[attr-defined]
+        req_min = constraints.get("require_min_candidates")
     except Exception:
         req_min = None
     if req_min is None:

@@ -1436,7 +1436,7 @@ def build_random_full_deck(
     primary_choice_idx, secondary_choice_idx, tertiary_choice_idx = _resolve_theme_choices_for_headless(base.commander, base)

     try:
-        from headless_runner import run as _run  # type: ignore
+        from headless_runner import run as _run
     except Exception as e:
         return RandomFullBuildResult(
             seed=base.seed,

@@ -1482,7 +1482,7 @@ def build_random_full_deck(
     summary: Dict[str, Any] | None = None
     try:
         if hasattr(builder, 'build_deck_summary'):
-            summary = builder.build_deck_summary()  # type: ignore[attr-defined]
+            summary = builder.build_deck_summary()
     except Exception:
         summary = None

@@ -1559,7 +1559,7 @@
     if isinstance(custom_base, str) and custom_base.strip():
         meta_payload["name"] = custom_base.strip()
     try:
-        commander_meta = builder.get_commander_export_metadata()  # type: ignore[attr-defined]
+        commander_meta = builder.get_commander_export_metadata()
     except Exception:
         commander_meta = {}
     names = commander_meta.get("commander_names") or []

@@ -1589,8 +1589,8 @@
     try:
         import os as _os
         import json as _json
-        csv_path = getattr(builder, 'last_csv_path', None)  # type: ignore[attr-defined]
-        txt_path = getattr(builder, 'last_txt_path', None)  # type: ignore[attr-defined]
+        csv_path = getattr(builder, 'last_csv_path', None)
+        txt_path = getattr(builder, 'last_txt_path', None)
         if csv_path and isinstance(csv_path, str):
             base_path, _ = _os.path.splitext(csv_path)
             # If txt missing but expected, look for sibling

@@ -1608,7 +1608,7 @@
             # Compute compliance if not already saved
             try:
                 if hasattr(builder, 'compute_and_print_compliance'):
-                    compliance = builder.compute_and_print_compliance(base_stem=_os.path.basename(base_path))  # type: ignore[attr-defined]
+                    compliance = builder.compute_and_print_compliance(base_stem=_os.path.basename(base_path))
             except Exception:
                 compliance = None
             # Write summary sidecar if missing

@@ -1646,7 +1646,7 @@
                 csv_path = existing_base
                 base_path, _ = _os.path.splitext(csv_path)
             else:
-                tmp_csv = builder.export_decklist_csv()  # type: ignore[attr-defined]
+                tmp_csv = builder.export_decklist_csv()
                 stem_base, ext = _os.path.splitext(tmp_csv)
                 if stem_base.endswith('_1'):
                     original = stem_base[:-2] + ext

@@ -1662,13 +1662,13 @@
             if _os.path.isfile(target_txt):
                 txt_path = target_txt
             else:
-                tmp_txt = builder.export_decklist_text(filename=_os.path.basename(base_path) + '.txt')  # type: ignore[attr-defined]
+                tmp_txt = builder.export_decklist_text(filename=_os.path.basename(base_path) + '.txt')
                 if tmp_txt.endswith('_1.txt') and _os.path.isfile(target_txt):
                     txt_path = target_txt
                 else:
                     txt_path = tmp_txt
             if hasattr(builder, 'compute_and_print_compliance'):
-                compliance = builder.compute_and_print_compliance(base_stem=_os.path.basename(base_path))  # type: ignore[attr-defined]
+                compliance = builder.compute_and_print_compliance(base_stem=_os.path.basename(base_path))
             if summary:
                 sidecar = base_path + '.summary.json'
                 if not _os.path.isfile(sidecar):

@@ -167,7 +167,7 @@ def _reset_metrics_for_test() -> None:
 def _sanitize_theme_list(values: Iterable[Any]) -> list[str]:
     sanitized: list[str] = []
     seen: set[str] = set()
-    for raw in values or []:  # type: ignore[arg-type]
+    for raw in values or []:
         text = str(raw or "").strip()
         if not text:
             continue

@@ -183,7 +183,7 @@ def _iter_json_themes(payload: object) -> Iterable[ThemeCatalogEntry]:
     try:
         from type_definitions_theme_catalog import ThemeCatalog  # pragma: no cover - primary import path
     except ImportError:  # pragma: no cover - fallback when running as package
-        from code.type_definitions_theme_catalog import ThemeCatalog  # type: ignore
+        from code.type_definitions_theme_catalog import ThemeCatalog

     try:
         catalog = ThemeCatalog.model_validate(payload)

code/file_setup/image_cache.py (new file, 567 lines)

@@ -0,0 +1,567 @@
"""
|
||||
Card image caching system.
|
||||
|
||||
Downloads and manages local cache of Magic: The Gathering card images
|
||||
from Scryfall, with graceful fallback to API when images are missing.
|
||||
|
||||
Features:
|
||||
- Optional caching (disabled by default for open source users)
|
||||
- Uses Scryfall bulk data API (respects rate limits and guidelines)
|
||||
- Downloads from Scryfall CDN (no rate limits on image files)
|
||||
- Progress tracking for long downloads
|
||||
- Resume capability if interrupted
|
||||
- Graceful fallback to API if images missing
|
||||
|
||||
Environment Variables:
|
||||
CACHE_CARD_IMAGES: 1=enable caching, 0=disable (default: 0)
|
||||
|
||||
Image Sizes:
|
||||
- small: 160px width (for list views)
|
||||
- normal: 488px width (for prominent displays, hover previews)
|
||||
|
||||
Directory Structure:
|
||||
card_files/images/small/ - Small thumbnails (~900 MB - 1.5 GB)
|
||||
card_files/images/normal/ - Normal images (~2.4 GB - 4.5 GB)
|
||||
|
||||
See: https://scryfall.com/docs/api
|
||||
"""
|
||||
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
import time
|
||||
from pathlib import Path
|
||||
from typing import Any, Optional
|
||||
from urllib.request import Request, urlopen
|
||||
|
||||
from code.file_setup.scryfall_bulk_data import ScryfallBulkDataClient
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Scryfall CDN has no rate limits, but we'll be conservative
|
||||
DOWNLOAD_DELAY = 0.05 # 50ms between image downloads (20 req/sec)
|
||||
|
||||
# Image sizes to cache
|
||||
IMAGE_SIZES = ["small", "normal"]
|
||||
|
||||
# Card name sanitization (filesystem-safe)
|
||||
INVALID_CHARS = r'[<>:"/\\|?*]'
|
||||
|
||||
|
||||
def sanitize_filename(card_name: str) -> str:
|
||||
"""
|
||||
Sanitize card name for use as filename.
|
||||
|
||||
Args:
|
||||
card_name: Original card name
|
||||
|
||||
Returns:
|
||||
Filesystem-safe filename
|
||||
"""
|
||||
# Replace invalid characters with underscore
|
||||
safe_name = re.sub(INVALID_CHARS, "_", card_name)
|
||||
# Remove multiple consecutive underscores
|
||||
safe_name = re.sub(r"_+", "_", safe_name)
|
||||
# Trim leading/trailing underscores
|
||||
safe_name = safe_name.strip("_")
|
||||
return safe_name
|
||||
|
||||
|
||||
class ImageCache:
|
||||
"""Manages local card image cache."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
base_dir: str = "card_files/images",
|
||||
bulk_data_path: str = "card_files/raw/scryfall_bulk_data.json",
|
||||
):
|
||||
"""
|
||||
Initialize image cache.
|
||||
|
||||
Args:
|
||||
base_dir: Base directory for cached images
|
||||
bulk_data_path: Path to Scryfall bulk data JSON
|
||||
"""
|
||||
self.base_dir = Path(base_dir)
|
||||
self.bulk_data_path = Path(bulk_data_path)
|
||||
self.client = ScryfallBulkDataClient()
|
||||
self._last_download_time: float = 0.0
|
||||
|
||||
def is_enabled(self) -> bool:
|
||||
"""Check if image caching is enabled via environment variable."""
|
||||
return os.getenv("CACHE_CARD_IMAGES", "0") == "1"
|
||||
|
||||
def get_image_path(self, card_name: str, size: str = "normal") -> Optional[Path]:
|
||||
"""
|
||||
Get local path to cached image if it exists.
|
||||
|
||||
Args:
|
||||
card_name: Card name
|
||||
size: Image size ('small' or 'normal')
|
||||
|
||||
Returns:
|
||||
Path to cached image, or None if not cached
|
||||
"""
|
||||
if not self.is_enabled():
|
||||
return None
|
||||
|
||||
safe_name = sanitize_filename(card_name)
|
||||
image_path = self.base_dir / size / f"{safe_name}.jpg"
|
||||
|
||||
if image_path.exists():
|
||||
return image_path
|
||||
return None
|
||||
|
||||
def get_image_url(self, card_name: str, size: str = "normal") -> str:
|
||||
"""
|
||||
Get image URL (local path if cached, Scryfall API otherwise).
|
||||
|
||||
Args:
|
||||
card_name: Card name
|
||||
size: Image size ('small' or 'normal')
|
||||
|
||||
Returns:
|
||||
URL or local path to image
|
||||
"""
|
||||
# Check local cache first
|
||||
local_path = self.get_image_path(card_name, size)
|
||||
if local_path:
|
||||
# Return as static file path for web serving
|
||||
return f"/static/card_images/{size}/{sanitize_filename(card_name)}.jpg"
|
||||
|
||||
# Fallback to Scryfall API
|
||||
from urllib.parse import quote
|
||||
card_query = quote(card_name)
|
||||
return f"https://api.scryfall.com/cards/named?fuzzy={card_query}&format=image&version={size}"
|
||||
|
||||
def _rate_limit_wait(self) -> None:
|
||||
"""Wait to respect rate limits between downloads."""
|
||||
elapsed = time.time() - self._last_download_time
|
||||
if elapsed < DOWNLOAD_DELAY:
|
||||
time.sleep(DOWNLOAD_DELAY - elapsed)
|
||||
self._last_download_time = time.time()
|
||||
|
||||
def _download_image(self, image_url: str, output_path: Path) -> bool:
|
||||
"""
|
||||
Download single image from Scryfall CDN.
|
||||
|
||||
Args:
|
||||
image_url: Image URL from bulk data
|
||||
output_path: Local path to save image
|
||||
|
||||
Returns:
|
||||
True if successful, False otherwise
|
||||
"""
|
||||
self._rate_limit_wait()
|
||||
|
||||
try:
|
||||
# Ensure output directory exists
|
||||
output_path.parent.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
req = Request(image_url)
|
||||
req.add_header("User-Agent", "MTG-Deckbuilder/3.0 (Image Cache)")
|
||||
|
||||
with urlopen(req, timeout=30) as response:
|
||||
image_data = response.read()
|
||||
with open(output_path, "wb") as f:
|
||||
f.write(image_data)
|
||||
|
||||
return True
|
||||
|
||||
except Exception as e:
|
||||
logger.debug(f"Failed to download {image_url}: {e}")
|
||||
# Clean up partial download
|
||||
if output_path.exists():
|
||||
output_path.unlink()
|
||||
return False
|
||||
|
||||
def _load_bulk_data(self) -> list[dict[str, Any]]:
|
||||
"""
|
||||
Load card data from bulk data JSON.
|
||||
|
||||
Returns:
|
||||
List of card objects with image URLs
|
||||
|
||||
Raises:
|
||||
FileNotFoundError: If bulk data file doesn't exist
|
||||
json.JSONDecodeError: If file is invalid JSON
|
||||
"""
|
||||
if not self.bulk_data_path.exists():
|
||||
raise FileNotFoundError(
|
||||
f"Bulk data file not found: {self.bulk_data_path}. "
|
||||
"Run download_bulk_data() first."
|
||||
)
|
||||
|
||||
logger.info(f"Loading bulk data from {self.bulk_data_path}")
|
||||
with open(self.bulk_data_path, "r", encoding="utf-8") as f:
|
||||
return json.load(f)
|
||||
|
||||
def _filter_to_our_cards(self, bulk_cards: list[dict[str, Any]]) -> list[dict[str, Any]]:
|
||||
"""
|
||||
Filter bulk data to only cards in our all_cards.parquet file.
|
||||
Deduplicates by card name (takes first printing only).
|
||||
|
||||
Args:
|
||||
bulk_cards: Full Scryfall bulk data
|
||||
|
||||
Returns:
|
||||
Filtered list of cards matching our dataset (one per unique name)
|
||||
"""
|
||||
try:
|
||||
import pandas as pd
|
||||
from code.path_util import get_processed_cards_path
|
||||
|
||||
# Load our card names
|
||||
parquet_path = get_processed_cards_path()
|
||||
df = pd.read_parquet(parquet_path, columns=["name"])
|
||||
our_card_names = set(df["name"].str.lower())
|
||||
|
||||
logger.info(f"Filtering {len(bulk_cards)} Scryfall cards to {len(our_card_names)} cards in our dataset")
|
||||
|
||||
# Filter and deduplicate - keep only first printing of each card
|
||||
seen_names = set()
|
||||
filtered = []
|
||||
|
||||
for card in bulk_cards:
|
||||
card_name_lower = card.get("name", "").lower()
|
||||
if card_name_lower in our_card_names and card_name_lower not in seen_names:
|
||||
filtered.append(card)
|
||||
seen_names.add(card_name_lower)
|
||||
|
||||
logger.info(f"Filtered to {len(filtered)} unique cards with image data")
|
||||
return filtered
|
||||
|
||||
except Exception as e:
|
||||
logger.warning(f"Could not filter to our cards: {e}. Using all Scryfall cards.")
|
||||
return bulk_cards
|
||||
|
||||
def download_bulk_data(self, progress_callback=None) -> None:
|
||||
"""
|
||||
Download latest Scryfall bulk data JSON.
|
||||
|
||||
Args:
|
||||
progress_callback: Optional callback(bytes_downloaded, total_bytes)
|
||||
|
||||
Raises:
|
||||
Exception: If download fails
|
||||
"""
|
||||
logger.info("Downloading Scryfall bulk data...")
|
||||
self.bulk_data_path.parent.mkdir(parents=True, exist_ok=True)
|
||||
self.client.get_bulk_data(
|
||||
output_path=str(self.bulk_data_path),
|
||||
progress_callback=progress_callback,
|
||||
)
|
||||
logger.info("Bulk data download complete")
|
||||
|
||||
def download_images(
|
||||
self,
|
||||
sizes: Optional[list[str]] = None,
|
||||
progress_callback=None,
|
||||
max_cards: Optional[int] = None,
|
||||
) -> dict[str, int]:
|
||||
"""
|
||||
Download card images from Scryfall CDN.
|
||||
|
||||
Args:
|
||||
sizes: Image sizes to download (default: ['small', 'normal'])
|
||||
progress_callback: Optional callback(current, total, card_name)
|
||||
max_cards: Maximum cards to download (for testing)
|
||||
|
||||
Returns:
|
||||
Dictionary with download statistics
|
||||
|
||||
Raises:
|
||||
FileNotFoundError: If bulk data not available
|
||||
"""
|
||||
if not self.is_enabled():
|
||||
logger.info("Image caching disabled (CACHE_CARD_IMAGES=0)")
|
||||
return {"skipped": 0}
|
||||
|
||||
if sizes is None:
|
        sizes = IMAGE_SIZES

        logger.info(f"Starting image download for sizes: {sizes}")

        # Load bulk data and filter to our cards
        bulk_cards = self._load_bulk_data()
        cards = self._filter_to_our_cards(bulk_cards)
        total_cards = len(cards) if max_cards is None else min(max_cards, len(cards))

        stats = {
            "total": total_cards,
            "downloaded": 0,
            "skipped": 0,
            "failed": 0,
        }

        for i, card in enumerate(cards[:total_cards]):
            card_name = card.get("name")
            if not card_name:
                stats["skipped"] += 1
                continue

            # Collect all faces to download (single-faced or multi-faced)
            faces_to_download = []

            # Check if card has direct image_uris (single-faced card)
            if card.get("image_uris"):
                faces_to_download.append({
                    "name": card_name,
                    "image_uris": card["image_uris"],
                })
            # Handle double-faced cards (get all faces)
            elif card.get("card_faces"):
                for face_idx, face in enumerate(card["card_faces"]):
                    if face.get("image_uris"):
                        # For multi-faced cards, append face name or index
                        face_name = face.get("name", f"{card_name}_face{face_idx}")
                        faces_to_download.append({
                            "name": face_name,
                            "image_uris": face["image_uris"],
                        })

            # Skip if no faces found
            if not faces_to_download:
                logger.debug(f"No image URIs for {card_name}")
                stats["skipped"] += 1
                continue

            # Download each face in each requested size
            for face in faces_to_download:
                face_name = face["name"]
                image_uris = face["image_uris"]

                for size in sizes:
                    image_url = image_uris.get(size)
                    if not image_url:
                        continue

                    # Check if already cached
                    safe_name = sanitize_filename(face_name)
                    output_path = self.base_dir / size / f"{safe_name}.jpg"

                    if output_path.exists():
                        stats["skipped"] += 1
                        continue

                    # Download image
                    if self._download_image(image_url, output_path):
                        stats["downloaded"] += 1
                    else:
                        stats["failed"] += 1

            # Progress callback
            if progress_callback:
                progress_callback(i + 1, total_cards, card_name)

        # Invalidate cached summary since we just downloaded new images
        self.invalidate_summary_cache()

        logger.info(f"Image download complete: {stats}")
        return stats

    def cache_statistics(self) -> dict[str, Any]:
        """
        Get statistics about cached images.

        Uses a cached summary.json file to avoid scanning thousands of files.
        Regenerates summary if it doesn't exist or is stale (based on WEB_AUTO_REFRESH_DAYS,
        default 7 days, matching the main card data staleness check).

        Returns:
            Dictionary with cache stats (count, size, etc.)
        """
        stats = {"enabled": self.is_enabled()}

        if not self.is_enabled():
            return stats

        summary_file = self.base_dir / "summary.json"

        # Get staleness threshold from environment (same as card data check)
        try:
            refresh_days = int(os.getenv('WEB_AUTO_REFRESH_DAYS', '7'))
        except Exception:
            refresh_days = 7

        if refresh_days <= 0:
            # Never consider stale
            refresh_seconds = float('inf')
        else:
            refresh_seconds = refresh_days * 24 * 60 * 60  # Convert days to seconds

        # Check if summary exists and is recent (less than refresh_seconds old)
        use_cached = False
        if summary_file.exists():
            try:
                import time
                file_age = time.time() - summary_file.stat().st_mtime
                if file_age < refresh_seconds:
                    use_cached = True
            except Exception:
                pass

        # Try to use cached summary
        if use_cached:
            try:
                import json
                with summary_file.open('r', encoding='utf-8') as f:
                    cached_stats = json.load(f)
                stats.update(cached_stats)
                return stats
            except Exception as e:
                logger.warning(f"Could not read cache summary: {e}")

        # Regenerate summary (fast - just count files and estimate size)
        for size in IMAGE_SIZES:
            size_dir = self.base_dir / size
            if size_dir.exists():
                # Fast count: count .jpg files without statting each one
                count = sum(1 for _ in size_dir.glob("*.jpg"))

                # Estimate total size based on typical averages to avoid stat() calls
                # Small images: ~40 KB avg, Normal images: ~100 KB avg
                avg_size_kb = 40 if size == "small" else 100
                estimated_size_mb = (count * avg_size_kb) / 1024

                stats[size] = {
                    "count": count,
                    "size_mb": round(estimated_size_mb, 1),
                }
            else:
                stats[size] = {"count": 0, "size_mb": 0.0}

        # Save summary for next time
        try:
            import json
            with summary_file.open('w', encoding='utf-8') as f:
                json.dump({k: v for k, v in stats.items() if k != "enabled"}, f)
        except Exception as e:
            logger.warning(f"Could not write cache summary: {e}")

        return stats

    def invalidate_summary_cache(self) -> None:
        """Delete the cached summary file to force regeneration on next call."""
        if not self.is_enabled():
            return

        summary_file = self.base_dir / "summary.json"
        if summary_file.exists():
            try:
                summary_file.unlink()
                logger.debug("Invalidated cache summary file")
            except Exception as e:
                logger.warning(f"Could not delete cache summary: {e}")


def main():
    """CLI entry point for image caching."""
    import argparse

    parser = argparse.ArgumentParser(description="Card image cache management")
    parser.add_argument(
        "--download",
        action="store_true",
        help="Download images from Scryfall",
    )
    parser.add_argument(
        "--stats",
        action="store_true",
        help="Show cache statistics",
    )
    parser.add_argument(
        "--max-cards",
        type=int,
        help="Maximum cards to download (for testing)",
    )
    parser.add_argument(
        "--sizes",
        nargs="+",
        default=IMAGE_SIZES,
        choices=IMAGE_SIZES,
        help="Image sizes to download",
    )
    parser.add_argument(
        "--force",
        action="store_true",
        help="Force re-download of bulk data even if recent",
    )

    args = parser.parse_args()

    # Setup logging
    logging.basicConfig(
        level=logging.INFO,
        format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
    )

    cache = ImageCache()

    if args.stats:
        stats = cache.cache_statistics()
        print("\nCache Statistics:")
        print(f"  Enabled: {stats['enabled']}")
        if stats["enabled"]:
            for size in IMAGE_SIZES:
                if size in stats:
                    print(
                        f"  {size.capitalize()}: {stats[size]['count']} images "
                        f"({stats[size]['size_mb']:.1f} MB)"
                    )

    elif args.download:
        if not cache.is_enabled():
            print("Image caching is disabled. Set CACHE_CARD_IMAGES=1 to enable.")
            return

        # Check if bulk data already exists and is recent (within 24 hours)
        bulk_data_exists = cache.bulk_data_path.exists()
        bulk_data_age_hours = None

        if bulk_data_exists:
            import time
            age_seconds = time.time() - cache.bulk_data_path.stat().st_mtime
            bulk_data_age_hours = age_seconds / 3600
            print(f"Bulk data file exists (age: {bulk_data_age_hours:.1f} hours)")

        # Download bulk data if missing, old, or forced
        if not bulk_data_exists or bulk_data_age_hours > 24 or args.force:
            print("Downloading Scryfall bulk data...")

            def bulk_progress(downloaded, total):
                if total > 0:
                    pct = (downloaded / total) * 100
                    print(f"  Progress: {downloaded / 1024 / 1024:.1f} MB / "
                          f"{total / 1024 / 1024:.1f} MB ({pct:.1f}%)", end="\r")

            cache.download_bulk_data(progress_callback=bulk_progress)
            print("\nBulk data downloaded successfully")
        else:
            print("Bulk data is recent, skipping download (use --force to re-download)")

        # Download images
        print(f"\nDownloading card images (sizes: {', '.join(args.sizes)})...")

        def image_progress(current, total, card_name):
            pct = (current / total) * 100
            print(f"  Progress: {current}/{total} ({pct:.1f}%) - {card_name}", end="\r")

        stats = cache.download_images(
            sizes=args.sizes,
            progress_callback=image_progress,
            max_cards=args.max_cards,
        )
        print("\n\nDownload complete:")
        print(f"  Total: {stats['total']}")
        print(f"  Downloaded: {stats['downloaded']}")
        print(f"  Skipped: {stats['skipped']}")
        print(f"  Failed: {stats['failed']}")

    else:
        parser.print_help()


if __name__ == "__main__":
    main()
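Usage note (editor's sketch): the CLI above supports a small smoke test via --download --max-cards 25 and a --stats readout. The same flow can be driven from Python; this sketch assumes the module is importable as code.file_setup.image_cache (as the setup integration further below imports it) and that CACHE_CARD_IMAGES is read from the environment:

import os
os.environ["CACHE_CARD_IMAGES"] = "1"  # assumption: the flag is consulted by is_enabled()

from code.file_setup.image_cache import ImageCache

cache = ImageCache()
if cache.is_enabled():
    # Small smoke test: 25 cards, sizes come from the module default (IMAGE_SIZES)
    stats = cache.download_images(max_cards=25)
    print(stats)  # e.g. {'total': 25, 'downloaded': ..., 'skipped': ..., 'failed': ...}
    print(cache.cache_statistics())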
@ -40,7 +40,7 @@ from typing import List, Dict, Any

# Third-party imports (optional)
try:
-    import inquirer  # type: ignore
+    import inquirer
except Exception:
    inquirer = None  # Fallback to simple input-based menu when unavailable
import pandas as pd

@ -40,7 +40,7 @@ from typing import List, Dict, Any

# Third-party imports (optional)
try:
-    import inquirer  # type: ignore
+    import inquirer
except Exception:
    inquirer = None  # Fallback to simple input-based menu when unavailable
import pandas as pd
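Editor's note: the optional-import pattern in these two hunks pairs with a runtime branch at the call site. A minimal sketch of such a branch (hypothetical prompt helper, not part of the diff; assumes the real python-inquirer API):

def prompt_choice(options):
    """Ask the user to pick one of options, with or without inquirer."""
    if inquirer is not None:
        # Rich interactive menu when the optional dependency is installed
        answer = inquirer.prompt([inquirer.List("choice", message="Pick one", choices=options)])
        return answer["choice"]
    # Plain-input fallback, mirroring the comment in the diff
    for i, opt in enumerate(options, 1):
        print(f"{i}. {opt}")
    return options[int(input("Choice #: ")) - 1]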
169 code/file_setup/scryfall_bulk_data.py (new file)
@ -0,0 +1,169 @@
"""
Scryfall Bulk Data API client.

Fetches bulk data JSON files from Scryfall's bulk data API, which provides
all card information including image URLs without hitting rate limits.

See: https://scryfall.com/docs/api/bulk-data
"""

import logging
import os
import time
from typing import Any
from urllib.request import Request, urlopen

logger = logging.getLogger(__name__)

BULK_DATA_API_URL = "https://api.scryfall.com/bulk-data"
DEFAULT_BULK_TYPE = "default_cards"  # All cards in Scryfall's database
RATE_LIMIT_DELAY = 0.1  # 100ms between requests (50-100ms per Scryfall guidelines)


class ScryfallBulkDataClient:
    """Client for fetching Scryfall bulk data."""

    def __init__(self, rate_limit_delay: float = RATE_LIMIT_DELAY):
        """
        Initialize Scryfall bulk data client.

        Args:
            rate_limit_delay: Seconds to wait between API requests (default 100ms)
        """
        self.rate_limit_delay = rate_limit_delay
        self._last_request_time: float = 0.0

    def _rate_limit_wait(self) -> None:
        """Wait to respect rate limits between API calls."""
        elapsed = time.time() - self._last_request_time
        if elapsed < self.rate_limit_delay:
            time.sleep(self.rate_limit_delay - elapsed)
        self._last_request_time = time.time()

    def _make_request(self, url: str) -> Any:
        """
        Make HTTP request with rate limiting and error handling.

        Args:
            url: URL to fetch

        Returns:
            Parsed JSON response

        Raises:
            Exception: If request fails after retries
        """
        self._rate_limit_wait()

        try:
            req = Request(url)
            req.add_header("User-Agent", "MTG-Deckbuilder/3.0 (Image Cache)")
            with urlopen(req, timeout=30) as response:
                import json
                return json.loads(response.read().decode("utf-8"))
        except Exception as e:
            logger.error(f"Failed to fetch {url}: {e}")
            raise

    def get_bulk_data_info(self, bulk_type: str = DEFAULT_BULK_TYPE) -> dict[str, Any]:
        """
        Get bulk data metadata (download URL, size, last updated).

        Args:
            bulk_type: Type of bulk data to fetch (default: default_cards)

        Returns:
            Dictionary with bulk data info including 'download_uri'

        Raises:
            ValueError: If bulk_type not found
            Exception: If API request fails
        """
        logger.info(f"Fetching bulk data info for type: {bulk_type}")
        response = self._make_request(BULK_DATA_API_URL)

        # Find the requested bulk data type
        for item in response.get("data", []):
            if item.get("type") == bulk_type:
                logger.info(
                    f"Found bulk data: {item.get('name')} "
                    f"(size: {item.get('size', 0) / 1024 / 1024:.1f} MB, "
                    f"updated: {item.get('updated_at', 'unknown')})"
                )
                return item

        raise ValueError(f"Bulk data type '{bulk_type}' not found")

    def download_bulk_data(
        self, download_uri: str, output_path: str, progress_callback=None
    ) -> None:
        """
        Download bulk data JSON file.

        Args:
            download_uri: Direct download URL from get_bulk_data_info()
            output_path: Local path to save the JSON file
            progress_callback: Optional callback(bytes_downloaded, total_bytes)

        Raises:
            Exception: If download fails
        """
        logger.info(f"Downloading bulk data from: {download_uri}")
        logger.info(f"Saving to: {output_path}")

        # No rate limit on bulk data downloads per Scryfall docs
        try:
            req = Request(download_uri)
            req.add_header("User-Agent", "MTG-Deckbuilder/3.0 (Image Cache)")

            with urlopen(req, timeout=60) as response:
                total_size = int(response.headers.get("Content-Length", 0))
                downloaded = 0
                chunk_size = 1024 * 1024  # 1MB chunks

                # Ensure output directory exists
                os.makedirs(os.path.dirname(output_path), exist_ok=True)

                with open(output_path, "wb") as f:
                    while True:
                        chunk = response.read(chunk_size)
                        if not chunk:
                            break
                        f.write(chunk)
                        downloaded += len(chunk)
                        if progress_callback:
                            progress_callback(downloaded, total_size)

            logger.info(f"Downloaded {downloaded / 1024 / 1024:.1f} MB successfully")

        except Exception as e:
            logger.error(f"Failed to download bulk data: {e}")
            # Clean up partial download
            if os.path.exists(output_path):
                os.remove(output_path)
            raise

    def get_bulk_data(
        self,
        bulk_type: str = DEFAULT_BULK_TYPE,
        output_path: str = "card_files/raw/scryfall_bulk_data.json",
        progress_callback=None,
    ) -> str:
        """
        Fetch bulk data info and download the JSON file.

        Args:
            bulk_type: Type of bulk data to fetch
            output_path: Where to save the JSON file
            progress_callback: Optional progress callback

        Returns:
            Path to downloaded file

        Raises:
            Exception: If fetch or download fails
        """
        info = self.get_bulk_data_info(bulk_type)
        download_uri = info["download_uri"]
        self.download_bulk_data(download_uri, output_path, progress_callback)
        return output_path
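Editor's note: a minimal consumer of the client above (a sketch; the only assumption beyond this file is that it is importable as code.file_setup.scryfall_bulk_data):

from code.file_setup.scryfall_bulk_data import ScryfallBulkDataClient

client = ScryfallBulkDataClient()

def show_progress(done: int, total: int) -> None:
    # total can be 0 if Scryfall omits Content-Length; guard the division
    if total:
        print(f"{done / total:.0%}", end="\r")

# One call fetches the metadata, then streams default_cards to disk
path = client.get_bulk_data(progress_callback=show_progress)
print(f"Bulk data saved to {path}")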
@ -350,6 +350,44 @@ def initial_setup() -> None:
    logger.info(f"  Processed: {processed_path}")
    logger.info("=" * 80)

+    # Step 3: Optional image caching (if enabled)
+    try:
+        from code.file_setup.image_cache import ImageCache
+        cache = ImageCache()
+
+        if cache.is_enabled():
+            logger.info("=" * 80)
+            logger.info("Card image caching enabled - starting download")
+            logger.info("=" * 80)
+
+            # Download bulk data
+            logger.info("Downloading Scryfall bulk data...")
+            cache.download_bulk_data()
+
+            # Download images
+            logger.info("Downloading card images (this may take 1-2 hours)...")
+
+            def progress(current, total, card_name):
+                if current % 100 == 0:  # Log every 100 cards
+                    pct = (current / total) * 100
+                    logger.info(f"  Progress: {current}/{total} ({pct:.1f}%) - {card_name}")
+
+            stats = cache.download_images(progress_callback=progress)
+
+            logger.info("=" * 80)
+            logger.info("✓ Image cache complete")
+            logger.info(f"  Downloaded: {stats['downloaded']}")
+            logger.info(f"  Skipped: {stats['skipped']}")
+            logger.info(f"  Failed: {stats['failed']}")
+            logger.info("=" * 80)
+        else:
+            logger.info("Card image caching disabled (CACHE_CARD_IMAGES=0)")
+            logger.info("Images will be fetched from Scryfall API on demand")
+
+    except Exception as e:
+        logger.error(f"Failed to cache images (continuing anyway): {e}")
+        logger.error("Images will be fetched from Scryfall API on demand")
+

def regenerate_processed_parquet() -> None:
    """Regenerate processed Parquet from existing raw file.
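Editor's note: the CACHE_CARD_IMAGES gate used above is read from the environment. Its exact implementation is outside this hunk; a plausible sketch, modeled on the truthy-string convention the settings hunk further below uses for LEGACY_CSV_COMPAT:

import os

def is_enabled() -> bool:
    # Hypothetical reading of the flag; mirrors the '1'/'true'/'on' convention used elsewhere
    return os.getenv("CACHE_CARD_IMAGES", "0").lower() in ("1", "true", "on", "enabled")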
@ -139,7 +139,7 @@ def _validate_commander_available(command_name: str) -> None:
        return

    try:
-        from commander_exclusions import lookup_commander_detail as _lookup_commander_detail  # type: ignore[import-not-found]
+        from commander_exclusions import lookup_commander_detail as _lookup_commander_detail
    except ImportError:  # pragma: no cover
        _lookup_commander_detail = None
@ -281,12 +281,12 @@ def run(
    # Optional deterministic seed for Random Modes (does not affect core when unset)
    try:
        if seed is not None:
-            builder.set_seed(seed)  # type: ignore[attr-defined]
+            builder.set_seed(seed)
    except Exception:
        pass
    # Mark this run as headless so builder can adjust exports and logging
    try:
-        builder.headless = True  # type: ignore[attr-defined]
+        builder.headless = True
    except Exception:
        pass

@ -294,9 +294,9 @@ def run(
    secondary_clean = (secondary_commander or "").strip()
    background_clean = (background or "").strip()
    try:
-        builder.partner_feature_enabled = partner_feature_enabled  # type: ignore[attr-defined]
-        builder.requested_secondary_commander = secondary_clean or None  # type: ignore[attr-defined]
-        builder.requested_background = background_clean or None  # type: ignore[attr-defined]
+        builder.partner_feature_enabled = partner_feature_enabled
+        builder.requested_secondary_commander = secondary_clean or None
+        builder.requested_background = background_clean or None
    except Exception:
        pass

@ -313,11 +313,11 @@ def run(
    # Configure include/exclude settings (M1: Config + Validation + Persistence)
    try:
-        builder.include_cards = list(include_cards or [])  # type: ignore[attr-defined]
-        builder.exclude_cards = list(exclude_cards or [])  # type: ignore[attr-defined]
-        builder.enforcement_mode = enforcement_mode  # type: ignore[attr-defined]
-        builder.allow_illegal = allow_illegal  # type: ignore[attr-defined]
-        builder.fuzzy_matching = fuzzy_matching  # type: ignore[attr-defined]
+        builder.include_cards = list(include_cards or [])
+        builder.exclude_cards = list(exclude_cards or [])
+        builder.enforcement_mode = enforcement_mode
+        builder.allow_illegal = allow_illegal
+        builder.fuzzy_matching = fuzzy_matching
    except Exception:
        pass

@ -336,16 +336,16 @@ def run(
    )

    try:
-        builder.theme_match_mode = theme_resolution.mode  # type: ignore[attr-defined]
-        builder.theme_catalog_version = theme_resolution.catalog_version  # type: ignore[attr-defined]
-        builder.user_theme_requested = list(theme_resolution.requested)  # type: ignore[attr-defined]
-        builder.user_theme_resolved = list(theme_resolution.resolved)  # type: ignore[attr-defined]
-        builder.user_theme_matches = list(theme_resolution.matches)  # type: ignore[attr-defined]
-        builder.user_theme_unresolved = list(theme_resolution.unresolved)  # type: ignore[attr-defined]
-        builder.user_theme_fuzzy_corrections = dict(theme_resolution.fuzzy_corrections)  # type: ignore[attr-defined]
-        builder.user_theme_resolution = theme_resolution  # type: ignore[attr-defined]
+        builder.theme_match_mode = theme_resolution.mode
+        builder.theme_catalog_version = theme_resolution.catalog_version
+        builder.user_theme_requested = list(theme_resolution.requested)
+        builder.user_theme_resolved = list(theme_resolution.resolved)
+        builder.user_theme_matches = list(theme_resolution.matches)
+        builder.user_theme_unresolved = list(theme_resolution.unresolved)
+        builder.user_theme_fuzzy_corrections = dict(theme_resolution.fuzzy_corrections)
+        builder.user_theme_resolution = theme_resolution
        if user_theme_weight is not None:
-            builder.user_theme_weight = float(user_theme_weight)  # type: ignore[attr-defined]
+            builder.user_theme_weight = float(user_theme_weight)
    except Exception:
        pass

@ -356,7 +356,7 @@ def run(
        ic: Dict[str, int] = {}
        for k, v in ideal_counts.items():
            try:
-                iv = int(v) if v is not None else None  # type: ignore
+                iv = int(v) if v is not None else None
            except Exception:
                continue
            if iv is None:

@ -365,7 +365,7 @@ def run(
            if k in {"ramp","lands","basic_lands","creatures","removal","wipes","card_advantage","protection"}:
                ic[k] = iv
        if ic:
-            builder.ideal_counts.update(ic)  # type: ignore[attr-defined]
+            builder.ideal_counts.update(ic)
    except Exception:
        pass
    builder.run_initial_setup()

@ -518,24 +518,24 @@ def _apply_combined_commander_to_builder(builder: DeckBuilder, combined_commande
    """Attach combined commander metadata to the builder for downstream use."""

    try:
-        builder.combined_commander = combined_commander  # type: ignore[attr-defined]
+        builder.combined_commander = combined_commander
    except Exception:
        pass

    try:
-        builder.partner_mode = combined_commander.partner_mode  # type: ignore[attr-defined]
+        builder.partner_mode = combined_commander.partner_mode
    except Exception:
        pass

    try:
-        builder.secondary_commander = combined_commander.secondary_name  # type: ignore[attr-defined]
+        builder.secondary_commander = combined_commander.secondary_name
    except Exception:
        pass

    try:
-        builder.combined_color_identity = combined_commander.color_identity  # type: ignore[attr-defined]
-        builder.combined_theme_tags = combined_commander.theme_tags  # type: ignore[attr-defined]
-        builder.partner_warnings = combined_commander.warnings  # type: ignore[attr-defined]
+        builder.combined_color_identity = combined_commander.color_identity
+        builder.combined_theme_tags = combined_commander.theme_tags
+        builder.partner_warnings = combined_commander.warnings
    except Exception:
        pass

@ -557,7 +557,7 @@ def _export_outputs(builder: DeckBuilder) -> None:
        # Persist for downstream reuse (e.g., random_entrypoint / reroll flows) so they don't re-export
        if csv_path:
            try:
-                builder.last_csv_path = csv_path  # type: ignore[attr-defined]
+                builder.last_csv_path = csv_path
            except Exception:
                pass
    except Exception:

@ -572,7 +572,7 @@ def _export_outputs(builder: DeckBuilder) -> None:
        finally:
            if txt_generated:
                try:
-                    builder.last_txt_path = txt_generated  # type: ignore[attr-defined]
+                    builder.last_txt_path = txt_generated
                except Exception:
                    pass
    else:

@ -582,7 +582,7 @@ def _export_outputs(builder: DeckBuilder) -> None:
        finally:
            if txt_generated:
                try:
-                    builder.last_txt_path = txt_generated  # type: ignore[attr-defined]
+                    builder.last_txt_path = txt_generated
                except Exception:
                    pass
    except Exception:

@ -1196,7 +1196,7 @@ def _run_random_mode(config: RandomRunConfig) -> int:
            RandomConstraintsImpossibleError,
            RandomThemeNoMatchError,
            build_random_full_deck,
-        )  # type: ignore
+        )
    except Exception as exc:
        print(f"Random mode unavailable: {exc}")
        return 1
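Editor's note: the hunks above (and several below) drop `# type: ignore[attr-defined]` from attribute assignments on the builder. That only type-checks cleanly if the attributes are now declared on the class itself; a hypothetical sketch of the shape that makes the per-line suppressions redundant (the actual DeckBuilder change is not shown in this excerpt):

from typing import Optional

class DeckBuilder:
    # Declared up front so mypy can resolve attribute assignments at call sites,
    # removing the need for per-line `# type: ignore[attr-defined]` comments.
    headless: bool = False
    include_cards: list[str]
    exclude_cards: list[str]
    requested_secondary_commander: Optional[str] = None
    requested_background: Optional[str] = None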
@ -36,7 +36,7 @@ except Exception: # pragma: no cover

try:
    # Support running as `python code/scripts/build_theme_catalog.py` when 'code' already on path
-    from scripts.extract_themes import (  # type: ignore
+    from scripts.extract_themes import (
        BASE_COLORS,
        collect_theme_tags_from_constants,
        collect_theme_tags_from_tagger_source,

@ -51,7 +51,7 @@ try:
    )
except ModuleNotFoundError:
    # Fallback: direct relative import when running within scripts package context
-    from extract_themes import (  # type: ignore
+    from extract_themes import (
        BASE_COLORS,
        collect_theme_tags_from_constants,
        collect_theme_tags_from_tagger_source,

@ -66,7 +66,7 @@ except ModuleNotFoundError:
    )

try:
-    from scripts.export_themes_to_yaml import slugify as slugify_theme  # type: ignore
+    from scripts.export_themes_to_yaml import slugify as slugify_theme
except Exception:
    _SLUG_RE = re.compile(r'[^a-z0-9-]')

@ -951,7 +951,7 @@ def main(): # pragma: no cover
    if args.schema:
        # Lazy import to avoid circular dependency: replicate minimal schema inline from models file if present
        try:
-            from type_definitions_theme_catalog import ThemeCatalog  # type: ignore
+            from type_definitions_theme_catalog import ThemeCatalog
            import json as _json
            print(_json.dumps(ThemeCatalog.model_json_schema(), indent=2))
            return

@ -990,8 +990,8 @@ def main(): # pragma: no cover
    # Safeguard: if catalog dir missing, attempt to auto-export Phase A YAML first
    if not CATALOG_DIR.exists():  # pragma: no cover (environmental)
        try:
-            from scripts.export_themes_to_yaml import main as export_main  # type: ignore
-            export_main(['--force'])  # type: ignore[arg-type]
+            from scripts.export_themes_to_yaml import main as export_main
+            export_main(['--force'])
        except Exception as _e:
            print(f"[build_theme_catalog] WARNING: catalog dir missing and auto export failed: {_e}", file=sys.stderr)
    if yaml is None:

@ -1013,7 +1013,7 @@ def main(): # pragma: no cover
        meta_block = raw.get('metadata_info') if isinstance(raw.get('metadata_info'), dict) else {}
        # Legacy migration: if no metadata_info but legacy provenance present, adopt it
        if not meta_block and isinstance(raw.get('provenance'), dict):
-            meta_block = raw.get('provenance')  # type: ignore
+            meta_block = raw.get('provenance')
            changed = True
        if force or not meta_block.get('last_backfill'):
            meta_block['last_backfill'] = time.strftime('%Y-%m-%dT%H:%M:%S')
@ -41,7 +41,7 @@ SCRIPT_ROOT = Path(__file__).resolve().parent
CODE_ROOT = SCRIPT_ROOT.parent
if str(CODE_ROOT) not in sys.path:
    sys.path.insert(0, str(CODE_ROOT))
-from scripts.extract_themes import derive_synergies_for_tags  # type: ignore
+from scripts.extract_themes import derive_synergies_for_tags

ROOT = Path(__file__).resolve().parents[2]
THEME_JSON = ROOT / 'config' / 'themes' / 'theme_list.json'
@ -18,8 +18,8 @@ ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..'))
if ROOT not in sys.path:
    sys.path.insert(0, ROOT)

-from code.settings import CSV_DIRECTORY  # type: ignore
-from code.tagging import tag_constants  # type: ignore
+from code.settings import CSV_DIRECTORY
+from code.tagging import tag_constants

BASE_COLORS = {
    'white': 'W',
@ -32,7 +32,7 @@ if str(CODE_ROOT) not in sys.path:
    sys.path.insert(0, str(CODE_ROOT))

try:
-    from code.settings import CSV_DIRECTORY as DEFAULT_CSV_DIRECTORY  # type: ignore
+    from code.settings import CSV_DIRECTORY as DEFAULT_CSV_DIRECTORY
except Exception:  # pragma: no cover - fallback for adhoc execution
    DEFAULT_CSV_DIRECTORY = "csv_files"
@ -42,7 +42,7 @@ def _sample_combinations(tags: List[str], iterations: int) -> List[Tuple[str | N

def _collect_tag_pool(df: pd.DataFrame) -> List[str]:
    tag_pool: set[str] = set()
-    for tags in df.get("_ltags", []):  # type: ignore[assignment]
+    for tags in df.get("_ltags", []):
        if not tags:
            continue
        for token in tags:
@ -37,7 +37,7 @@ def _refresh_setup() -> None:

def _refresh_tags() -> None:
    tagger = importlib.import_module("code.tagging.tagger")
-    tagger = importlib.reload(tagger)  # type: ignore[assignment]
+    tagger = importlib.reload(tagger)
    for color in SUPPORTED_COLORS:
        tagger.load_dataframe(color)
@ -21,7 +21,7 @@ PROJECT_ROOT = Path(__file__).resolve().parents[1]
if str(PROJECT_ROOT) not in sys.path:
    sys.path.append(str(PROJECT_ROOT))

-from deck_builder.random_entrypoint import (  # type: ignore # noqa: E402
+from deck_builder.random_entrypoint import (  # noqa: E402
    _build_random_theme_pool,
    _ensure_theme_tag_cache,
    _load_commanders_df,
@ -731,7 +731,7 @@ def main(): # pragma: no cover (script orchestration)
        if cand:
            theme_card_hits[display] = cand
    # Build global duplicate frequency map ONCE (baseline prior to this run) if threshold active
-    if args.common_card_threshold > 0 and 'GLOBAL_CARD_FREQ' not in globals():  # type: ignore
+    if args.common_card_threshold > 0 and 'GLOBAL_CARD_FREQ' not in globals():
        freq: Dict[str, int] = {}
        total_themes = 0
        for fp0 in CATALOG_DIR.glob('*.yml'):

@ -748,10 +748,10 @@ def main(): # pragma: no cover (script orchestration)
                continue
            seen_local.add(c)
            freq[c] = freq.get(c, 0) + 1
-        globals()['GLOBAL_CARD_FREQ'] = (freq, total_themes)  # type: ignore
+        globals()['GLOBAL_CARD_FREQ'] = (freq, total_themes)
    # Apply duplicate filtering to candidate lists (do NOT mutate existing example_cards)
-    if args.common_card_threshold > 0 and 'GLOBAL_CARD_FREQ' in globals():  # type: ignore
-        freq_map, total_prev = globals()['GLOBAL_CARD_FREQ']  # type: ignore
+    if args.common_card_threshold > 0 and 'GLOBAL_CARD_FREQ' in globals():
+        freq_map, total_prev = globals()['GLOBAL_CARD_FREQ']
        if total_prev > 0:  # avoid div-by-zero
            cutoff = args.common_card_threshold
            def _filter(lst: List[Tuple[float, str, Set[str]]]) -> List[Tuple[float, str, Set[str]]]:

@ -803,8 +803,8 @@ def main(): # pragma: no cover (script orchestration)
        print(f"[promote] modified {changed_count} themes")
    if args.fill_example_cards:
        print(f"[cards] modified {cards_changed} themes (target {args.cards_target})")
-    if args.print_dup_metrics and 'GLOBAL_CARD_FREQ' in globals():  # type: ignore
-        freq_map, total_prev = globals()['GLOBAL_CARD_FREQ']  # type: ignore
+    if args.print_dup_metrics and 'GLOBAL_CARD_FREQ' in globals():
+        freq_map, total_prev = globals()['GLOBAL_CARD_FREQ']
        if total_prev:
            items = sorted(freq_map.items(), key=lambda x: (-x[1], x[0]))[:30]
            print('[dup-metrics] Top shared example_cards (baseline before this run):')
@ -31,9 +31,9 @@ CODE_ROOT = ROOT / 'code'
if str(CODE_ROOT) not in sys.path:
    sys.path.insert(0, str(CODE_ROOT))

-from type_definitions_theme_catalog import ThemeCatalog, ThemeYAMLFile  # type: ignore
-from scripts.extract_themes import load_whitelist_config  # type: ignore
-from scripts.build_theme_catalog import build_catalog  # type: ignore
+from type_definitions_theme_catalog import ThemeCatalog, ThemeYAMLFile
+from scripts.extract_themes import load_whitelist_config
+from scripts.build_theme_catalog import build_catalog

CATALOG_JSON = ROOT / 'config' / 'themes' / 'theme_list.json'
@ -89,11 +89,8 @@ COLUMN_ORDER = CARD_COLUMN_ORDER
TAGGED_COLUMN_ORDER = CARD_COLUMN_ORDER
REQUIRED_COLUMNS = REQUIRED_CARD_COLUMNS

-MAIN_MENU_ITEMS: List[str] = ['Build A Deck', 'Setup CSV Files', 'Tag CSV Files', 'Quit']
-
-SETUP_MENU_ITEMS: List[str] = ['Initial Setup', 'Regenerate CSV', 'Main Menu']
-
-CSV_DIRECTORY: str = 'csv_files'
+# MAIN_MENU_ITEMS, SETUP_MENU_ITEMS, CSV_DIRECTORY already defined above (lines 67-70)
+
CARD_FILES_DIRECTORY: str = 'card_files'  # Parquet files for consolidated card data

# ----------------------------------------------------------------------------------

@ -111,11 +108,7 @@ CARD_FILES_PROCESSED_DIR = os.getenv('CARD_FILES_PROCESSED_DIR', os.path.join(CA
# Set to '1' or 'true' to enable CSV fallback when Parquet loading fails
LEGACY_CSV_COMPAT = os.getenv('LEGACY_CSV_COMPAT', '0').lower() in ('1', 'true', 'on', 'enabled')

-# Configuration for handling null/NA values in DataFrame columns
-FILL_NA_COLUMNS: Dict[str, Optional[str]] = {
-    'colorIdentity': 'Colorless',  # Default color identity for cards without one
-    'faceName': None  # Use card's name column value when face name is not available
-}
+# FILL_NA_COLUMNS already defined above (lines 75-78)

# ----------------------------------------------------------------------------------
# ALL CARDS CONSOLIDATION FEATURE FLAG
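Editor's note: why this deduplication matters — a later module-level assignment silently shadows the earlier one, so edits to the canonical definitions near the top of the file were previously ignored. A generic illustration (not project code):

CSV_DIRECTORY = "csv_files"    # canonical definition near the top of the module
# ... many lines later ...
CSV_DIRECTORY = "csv_files"    # duplicate removed by this hunk; it silently won
print(CSV_DIRECTORY)           # Python keeps only the last assignment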
@ -30,14 +30,14 @@ try:
    import logging_util
except Exception:
    # Fallback for direct module loading
-    import importlib.util  # type: ignore
+    import importlib.util
    root = Path(__file__).resolve().parents[1]
    lu_path = root / 'logging_util.py'
    spec = importlib.util.spec_from_file_location('logging_util', str(lu_path))
-    mod = importlib.util.module_from_spec(spec)  # type: ignore[arg-type]
+    mod = importlib.util.module_from_spec(spec)
    assert spec and spec.loader
-    spec.loader.exec_module(mod)  # type: ignore[assignment]
-    logging_util = mod  # type: ignore
+    spec.loader.exec_module(mod)
+    logging_util = mod

logger = logging_util.logging.getLogger(__name__)
logger.setLevel(logging_util.LOG_LEVEL)
@ -240,6 +240,13 @@ def merge_multi_face_rows(

        faces_payload = [_build_face_payload(row) for _, row in group_sorted.iterrows()]

+        # M9: Capture back face type for MDFC land detection
+        if len(group_sorted) >= 2 and "type" in group_sorted.columns:
+            back_face_row = group_sorted.iloc[1]
+            back_type = str(back_face_row.get("type", "") or "")
+            if back_type:
+                work_df.at[primary_idx, "backType"] = back_type
+
        drop_indices.extend(group_sorted.index[1:])

        merged_count += 1
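Editor's note: the new backType column is what enables the MDFC land detection the comment mentions. A sketch of the kind of downstream check this supports (hypothetical helper; the real consumer lives elsewhere in the codebase):

import pandas as pd

def is_mdfc_land_back(row: pd.Series) -> bool:
    # backType carries the merged card's back-face type line, e.g. "Land - Forest"
    back_type = str(row.get("backType", "") or "")
    return "Land" in back_type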
@ -173,7 +173,7 @@ def _merge_summary_recorder(color: str):


def _write_compat_snapshot(df: pd.DataFrame, color: str) -> None:
-    try:  # type: ignore[name-defined]
+    try:
        _DFC_COMPAT_DIR.mkdir(parents=True, exist_ok=True)
        path = _DFC_COMPAT_DIR / f"{color}_cards_unmerged.csv"
        df.to_csv(path, index=False)

@ -173,7 +173,7 @@ def _merge_summary_recorder(color: str):


def _write_compat_snapshot(df: pd.DataFrame, color: str) -> None:
    """Write DFC compatibility snapshot (diagnostic output, kept as CSV for now)."""
-    try:  # type: ignore[name-defined]
+    try:
        _DFC_COMPAT_DIR.mkdir(parents=True, exist_ok=True)
        path = _DFC_COMPAT_DIR / f"{color}_cards_unmerged.csv"
        df.to_csv(path, index=False)  # M3: Kept as CSV (diagnostic only, not main data flow)
@ -11,9 +11,9 @@ def _load_applier():
    root = Path(__file__).resolve().parents[2]
    mod_path = root / 'code' / 'tagging' / 'bracket_policy_applier.py'
    spec = importlib.util.spec_from_file_location('bracket_policy_applier', str(mod_path))
-    mod = importlib.util.module_from_spec(spec)  # type: ignore[arg-type]
+    mod = importlib.util.module_from_spec(spec)
    assert spec and spec.loader
-    spec.loader.exec_module(mod)  # type: ignore[assignment]
+    spec.loader.exec_module(mod)
    return mod
@ -30,8 +30,8 @@ def test_card_index_color_identity_list_handles_edge_cases(tmp_path, monkeypatch
    csv_path = write_csv(tmp_path)
    monkeypatch.setenv("CARD_INDEX_EXTRA_CSV", str(csv_path))
    # Force rebuild
-    card_index._CARD_INDEX.clear()  # type: ignore
-    card_index._CARD_INDEX_MTIME = None  # type: ignore
+    card_index._CARD_INDEX.clear()
+    card_index._CARD_INDEX_MTIME = None
    card_index.maybe_build_index()

    pool = card_index.get_tag_pool("Blink")

@ -8,7 +8,7 @@ from urllib.parse import parse_qs, urlparse
import pytest
from fastapi.testclient import TestClient

-from code.web.app import app  # type: ignore
+from code.web.app import app
from code.web.services.commander_catalog_loader import clear_commander_catalog_cache

@ -5,7 +5,7 @@ from pathlib import Path
import pytest
from fastapi.testclient import TestClient

-from code.web.app import app  # type: ignore
+from code.web.app import app
from code.web.services import telemetry
from code.web.services.commander_catalog_loader import clear_commander_catalog_cache

@ -7,7 +7,7 @@ from types import SimpleNamespace
import pytest
from fastapi.testclient import TestClient

-from code.web.app import app  # type: ignore
+from code.web.app import app
from code.web.routes import commanders
from code.web.services import commander_catalog_loader
from code.web.services.commander_catalog_loader import clear_commander_catalog_cache, load_commander_catalog

@ -24,7 +24,7 @@ def load_app_with_env(**env: str) -> types.ModuleType:
        os.environ.pop(key, None)
    for k, v in env.items():
        os.environ[k] = v
-    import code.web.app as app_module  # type: ignore
+    import code.web.app as app_module
    importlib.reload(app_module)
    return app_module
@ -50,7 +50,7 @@ def _load_catalog() -> Dict[str, Any]:
def test_deterministic_build_under_seed():
    # Import build after setting seed env
    os.environ['EDITORIAL_SEED'] = '999'
-    from scripts.build_theme_catalog import build_catalog  # type: ignore
+    from scripts.build_theme_catalog import build_catalog
    first = build_catalog(limit=0, verbose=False)
    second = build_catalog(limit=0, verbose=False)
    # Drop volatile metadata_info/timestamp fields before comparison

@ -106,7 +106,7 @@ def test_metadata_info_block_coverage():

def test_synergy_commanders_exclusion_of_examples():
-    import yaml  # type: ignore
+    import yaml
    pattern = re.compile(r" - Synergy \(.*\)$")
    violations: List[str] = []
    for p in CATALOG_DIR.glob('*.yml'):

@ -128,7 +128,7 @@ def test_synergy_commanders_exclusion_of_examples():

def test_mapping_trigger_specialization_guard():
-    import yaml  # type: ignore
+    import yaml
    assert MAPPING.exists(), "description_mapping.yml missing"
    mapping_yaml = yaml.safe_load(MAPPING.read_text(encoding='utf-8')) or []
    triggers: Set[str] = set()

@ -20,7 +20,7 @@ def load_app_with_env(**env: str) -> types.ModuleType:
        os.environ.pop(key, None)
    for k, v in env.items():
        os.environ[k] = v
-    import code.web.app as app_module  # type: ignore
+    import code.web.app as app_module
    importlib.reload(app_module)
    return app_module
@ -14,7 +14,7 @@ class DummyBuilder(ReportingMixin):
        self.card_library = card_library
        self.color_identity = colors
        self.output_lines: List[str] = []
-        self.output_func = self.output_lines.append  # type: ignore[assignment]
+        self.output_func = self.output_lines.append
        self._full_cards_df = None
        self._combined_cards_df = None
        self.include_exclude_diagnostics = None

@ -20,7 +20,7 @@ def _stub_modal_matrix(builder: DeckBuilder) -> None:
        "Forest": {"G": 1},
    }

-    builder._compute_color_source_matrix = MethodType(fake_matrix, builder)  # type: ignore[attr-defined]
+    builder._compute_color_source_matrix = MethodType(fake_matrix, builder)


def test_modal_dfc_swaps_basic_when_enabled():

@ -18,7 +18,7 @@ def test_multicopy_clamp_trims_current_stage_additions_only():
    # Preseed 95 cards in the library
    b.card_library = {"Filler": {"Count": 95, "Role": "Test", "SubRole": "", "AddedBy": "Test"}}
    # Set a multi-copy selection that would exceed 100 by 15
-    b._web_multi_copy = {  # type: ignore[attr-defined]
+    b._web_multi_copy = {
        "id": "persistent_petitioners",
        "name": "Persistent Petitioners",
        "count": 20,

@ -23,7 +23,7 @@ def test_petitioners_clamp_to_100_and_reduce_creature_slots():
        "card_advantage": 8, "protection": 4,
    }
    # Thread multi-copy selection for Petitioners as a creature archetype
-    b._web_multi_copy = {  # type: ignore[attr-defined]
+    b._web_multi_copy = {
        "id": "persistent_petitioners",
        "name": "Persistent Petitioners",
        "count": 40,  # intentionally large to trigger clamp/adjustments

@ -17,7 +17,7 @@ def _minimal_ctx(selection: dict):

    b = DeckBuilder(output_func=out, input_func=lambda *_: "", headless=True)
    # Thread selection and ensure empty library
-    b._web_multi_copy = selection  # type: ignore[attr-defined]
+    b._web_multi_copy = selection
    b.card_library = {}

    ctx = {
@ -1,7 +1,7 @@
import importlib
import pytest
try:
-    from starlette.testclient import TestClient  # type: ignore
+    from starlette.testclient import TestClient
except Exception:  # pragma: no cover - optional dep in CI
    TestClient = None  # type: ignore

@ -128,7 +128,7 @@ def _make_request(path: str = "/api/partner/suggestions", query_string: str = ""
        "client": ("203.0.113.5", 52345),
        "server": ("testserver", 80),
    }
-    request = Request(scope, receive=_receive)  # type: ignore[arg-type]
+    request = Request(scope, receive=_receive)
    request.state.request_id = "req-telemetry"
    return request
@ -197,21 +197,21 @@ def test_load_dataset_refresh_retries_after_prior_failure(tmp_path: Path, monkey
    from code.web.services import orchestrator as orchestrator_service

    original_default = partner_service.DEFAULT_DATASET_PATH
-    original_path = partner_service._DATASET_PATH  # type: ignore[attr-defined]
-    original_cache = partner_service._DATASET_CACHE  # type: ignore[attr-defined]
-    original_attempted = partner_service._DATASET_REFRESH_ATTEMPTED  # type: ignore[attr-defined]
+    original_path = partner_service._DATASET_PATH
+    original_cache = partner_service._DATASET_CACHE
+    original_attempted = partner_service._DATASET_REFRESH_ATTEMPTED

    partner_service.DEFAULT_DATASET_PATH = dataset_path
-    partner_service._DATASET_PATH = dataset_path  # type: ignore[attr-defined]
-    partner_service._DATASET_CACHE = None  # type: ignore[attr-defined]
-    partner_service._DATASET_REFRESH_ATTEMPTED = True  # type: ignore[attr-defined]
+    partner_service._DATASET_PATH = dataset_path
+    partner_service._DATASET_CACHE = None
+    partner_service._DATASET_REFRESH_ATTEMPTED = True

    calls = {"count": 0}

    payload_path = tmp_path / "seed_dataset.json"
    _write_dataset(payload_path)

-    def seeded_refresh(out_func=None, *, force=False, root=None):  # type: ignore[override]
+    def seeded_refresh(out_func=None, *, force=False, root=None):
        calls["count"] += 1
        dataset_path.write_text(payload_path.read_text(encoding="utf-8"), encoding="utf-8")

@ -227,9 +227,9 @@ def test_load_dataset_refresh_retries_after_prior_failure(tmp_path: Path, monkey
        assert calls["count"] == 1
    finally:
        partner_service.DEFAULT_DATASET_PATH = original_default
-        partner_service._DATASET_PATH = original_path  # type: ignore[attr-defined]
-        partner_service._DATASET_CACHE = original_cache  # type: ignore[attr-defined]
-        partner_service._DATASET_REFRESH_ATTEMPTED = original_attempted  # type: ignore[attr-defined]
+        partner_service._DATASET_PATH = original_path
+        partner_service._DATASET_CACHE = original_cache
+        partner_service._DATASET_REFRESH_ATTEMPTED = original_attempted
        try:
            dataset_path.unlink()
        except FileNotFoundError:
@ -33,7 +33,7 @@ def _invoke_helper(
) -> list[tuple[list[str], str]]:
    calls: list[tuple[list[str], str]] = []

-    def _fake_run(cmd, check=False, cwd=None):  # type: ignore[no-untyped-def]
+    def _fake_run(cmd, check=False, cwd=None):
        calls.append((list(cmd), cwd))
        class _Completed:
            returncode = 0

@ -10,7 +10,7 @@ fastapi = pytest.importorskip("fastapi")
def load_app_with_env(**env: str) -> types.ModuleType:
    for k,v in env.items():
        os.environ[k] = v
-    import code.web.app as app_module  # type: ignore
+    import code.web.app as app_module
    importlib.reload(app_module)
    return app_module
@ -1,7 +1,7 @@
import json
from fastapi.testclient import TestClient

-from code.web.app import app  # type: ignore
+from code.web.app import app


def test_preview_includes_curated_examples_regression():

@ -1,8 +1,8 @@
import os

-from code.web.services.theme_preview import get_theme_preview, bust_preview_cache  # type: ignore
-from code.web.services import preview_cache as pc  # type: ignore
-from code.web.services.preview_metrics import preview_metrics  # type: ignore
+from code.web.services.theme_preview import get_theme_preview, bust_preview_cache
+from code.web.services import preview_cache as pc
+from code.web.services.preview_metrics import preview_metrics


def _prime(slug: str, limit: int = 12, hits: int = 0, *, colors=None):

@ -89,7 +89,7 @@ def test_env_weight_override(monkeypatch):
    bust_preview_cache()
    # Clear module-level caches for weights
    if hasattr(pc, '_EVICT_WEIGHTS_CACHE'):
-        pc._EVICT_WEIGHTS_CACHE = None  # type: ignore
+        pc._EVICT_WEIGHTS_CACHE = None
    # Create two entries: one older with many hits, one fresh with none.
    _prime('Blink', limit=6, hits=6, colors=None)  # older hot entry
    old_key = next(iter(pc.PREVIEW_CACHE.keys()))
@ -1,6 +1,6 @@
import os
-from code.web.services.theme_preview import get_theme_preview, bust_preview_cache  # type: ignore
-from code.web.services import preview_cache as pc  # type: ignore
+from code.web.services.theme_preview import get_theme_preview, bust_preview_cache
+from code.web.services import preview_cache as pc


def test_basic_low_score_eviction(monkeypatch):

@ -17,7 +17,7 @@ def test_basic_low_score_eviction(monkeypatch):
        get_theme_preview('Blink', limit=6, colors=c)
    # Cache limit 5, inserted 6 distinct -> eviction should have occurred
    assert len(pc.PREVIEW_CACHE) <= 5
-    from code.web.services.preview_metrics import preview_metrics  # type: ignore
+    from code.web.services.preview_metrics import preview_metrics
    m = preview_metrics()
    assert m['preview_cache_evictions'] >= 1, 'Expected at least one eviction'
    assert m['preview_cache_evictions_by_reason'].get('low_score', 0) >= 1
@ -1,5 +1,5 @@
from fastapi.testclient import TestClient
-from code.web.app import app  # type: ignore
+from code.web.app import app


def test_minimal_variant_hides_controls_and_headers():

@ -8,7 +8,7 @@ pytestmark = pytest.mark.skip(reason="M4: preview_perf_benchmark module removed
def test_fetch_all_theme_slugs_retries(monkeypatch):
    calls = {"count": 0}

-    def fake_fetch(url):  # type: ignore[override]
+    def fake_fetch(url):
        calls["count"] += 1
        if calls["count"] == 1:
            raise RuntimeError("transient 500")

@ -27,7 +27,7 @@ def test_fetch_all_theme_slugs_page_level_retry(monkeypatch):
    calls = {"count": 0}

-    def fake_fetch_with_retry(url, attempts=3, delay=0.6):  # type: ignore[override]
+    def fake_fetch_with_retry(url, attempts=3, delay=0.6):
        calls["count"] += 1
        if calls["count"] < 3:
            raise RuntimeError("service warming up")
@ -1,5 +1,5 @@
from fastapi.testclient import TestClient
-from code.web.app import app  # type: ignore
+from code.web.app import app


def test_preview_fragment_suppress_curated_removes_examples():

@ -3,16 +3,16 @@ from code.web.services import preview_cache as pc

def _force_interval_elapsed():
    # Ensure adaptation interval guard passes
-    if pc._LAST_ADAPT_AT is not None:  # type: ignore[attr-defined]
-        pc._LAST_ADAPT_AT -= (pc._ADAPT_INTERVAL_S + 1)  # type: ignore[attr-defined]
+    if pc._LAST_ADAPT_AT is not None:
+        pc._LAST_ADAPT_AT -= (pc._ADAPT_INTERVAL_S + 1)


def test_ttl_adapts_down_and_up(capsys):
    # Enable adaptation regardless of env
-    pc._ADAPTATION_ENABLED = True  # type: ignore[attr-defined]
-    pc.TTL_SECONDS = pc._TTL_BASE  # type: ignore[attr-defined]
-    pc._RECENT_HITS.clear()  # type: ignore[attr-defined]
-    pc._LAST_ADAPT_AT = None  # type: ignore[attr-defined]
+    pc._ADAPTATION_ENABLED = True
+    pc.TTL_SECONDS = pc._TTL_BASE
+    pc._RECENT_HITS.clear()
+    pc._LAST_ADAPT_AT = None

    # Low hit ratio pattern (~0.1)
    for _ in range(72):

@ -23,11 +23,11 @@ def test_ttl_adapts_down_and_up(capsys):
    out1 = capsys.readouterr().out
    assert "theme_preview_ttl_adapt" in out1, "expected adaptation log for low hit ratio"
    ttl_after_down = pc.TTL_SECONDS
-    assert ttl_after_down <= pc._TTL_BASE  # type: ignore[attr-defined]
+    assert ttl_after_down <= pc._TTL_BASE

    # Force interval elapsed & high hit ratio pattern (~0.9)
    _force_interval_elapsed()
-    pc._RECENT_HITS.clear()  # type: ignore[attr-defined]
+    pc._RECENT_HITS.clear()
    for _ in range(72):
        pc.record_request_hit(True)
    for _ in range(8):
@ -19,17 +19,17 @@ def _client_with_flags(window_s: int = 2, limit_random: int = 2, limit_build: in

    # Force fresh import so RATE_LIMIT_* constants reflect env
    sys.modules.pop('code.web.app', None)
-    from code.web import app as app_module  # type: ignore
+    from code.web import app as app_module
    # Force override constants for deterministic test
    try:
-        app_module.RATE_LIMIT_ENABLED = True  # type: ignore[attr-defined]
-        app_module.RATE_LIMIT_WINDOW_S = window_s  # type: ignore[attr-defined]
-        app_module.RATE_LIMIT_RANDOM = limit_random  # type: ignore[attr-defined]
-        app_module.RATE_LIMIT_BUILD = limit_build  # type: ignore[attr-defined]
-        app_module.RATE_LIMIT_SUGGEST = limit_suggest  # type: ignore[attr-defined]
+        app_module.RATE_LIMIT_ENABLED = True
+        app_module.RATE_LIMIT_WINDOW_S = window_s
+        app_module.RATE_LIMIT_RANDOM = limit_random
+        app_module.RATE_LIMIT_BUILD = limit_build
+        app_module.RATE_LIMIT_SUGGEST = limit_suggest
        # Reset in-memory counters
        if hasattr(app_module, '_RL_COUNTS'):
-            app_module._RL_COUNTS.clear()  # type: ignore[attr-defined]
+            app_module._RL_COUNTS.clear()
    except Exception:
        pass
    return TestClient(app_module.app)
@ -3,8 +3,8 @@ from pathlib import Path

from fastapi.testclient import TestClient

-from code.web import app as web_app  # type: ignore
-from code.web.app import app  # type: ignore
+from code.web import app as web_app
+from code.web.app import app

# Ensure project root on sys.path for absolute imports
ROOT = Path(__file__).resolve().parents[2]
@ -9,17 +9,17 @@ def setup_module(module): # ensure deterministic env weights

def test_rarity_diminishing():
    # Monkeypatch internal index
-    card_index._CARD_INDEX.clear()  # type: ignore
+    card_index._CARD_INDEX.clear()
    theme = "Test Theme"
-    card_index._CARD_INDEX[theme] = [  # type: ignore
+    card_index._CARD_INDEX[theme] = [
        {"name": "Mythic One", "tags": [theme], "color_identity": "G", "mana_cost": "G", "rarity": "mythic"},
        {"name": "Mythic Two", "tags": [theme], "color_identity": "G", "mana_cost": "G", "rarity": "mythic"},
    ]
    def no_build():
        return None
-    sampling.maybe_build_index = no_build  # type: ignore
+    sampling.maybe_build_index = no_build
    cards = sampling.sample_real_cards_for_theme(theme, 2, None, synergies=[theme], commander=None)
-    rarity_weights = [r for c in cards for r in c["reasons"] if r.startswith("rarity_weight_calibrated")]  # type: ignore
+    rarity_weights = [r for c in cards for r in c["reasons"] if r.startswith("rarity_weight_calibrated")]
    assert len(rarity_weights) >= 2
    v1 = float(rarity_weights[0].split(":")[-1])
    v2 = float(rarity_weights[1].split(":")[-1])

@ -40,15 +40,15 @@ def test_commander_overlap_monotonic_diminishing():


def test_splash_off_color_penalty_applied():
-    card_index._CARD_INDEX.clear()  # type: ignore
+    card_index._CARD_INDEX.clear()
    theme = "Splash Theme"
    # Commander W U B R (4 colors)
    commander = {"name": "CommanderTest", "tags": [theme], "color_identity": "WUBR", "mana_cost": "", "rarity": "mythic"}
    # Card with single off-color G (W U B R G)
    splash_card = {"name": "CardSplash", "tags": [theme], "color_identity": "WUBRG", "mana_cost": "G", "rarity": "rare"}
-    card_index._CARD_INDEX[theme] = [commander, splash_card]  # type: ignore
-    sampling.maybe_build_index = lambda: None  # type: ignore
+    card_index._CARD_INDEX[theme] = [commander, splash_card]
+    sampling.maybe_build_index = lambda: None
    cards = sampling.sample_real_cards_for_theme(theme, 2, None, synergies=[theme], commander="CommanderTest")
    splash = next((c for c in cards if c["name"] == "CardSplash"), None)
    assert splash is not None
-    assert any(r.startswith("splash_off_color_penalty") for r in splash["reasons"])  # type: ignore
+    assert any(r.startswith("splash_off_color_penalty") for r in splash["reasons"])
@ -1,5 +1,5 @@
import re
-from code.web.services.theme_preview import get_theme_preview  # type: ignore
+from code.web.services.theme_preview import get_theme_preview

# We can't easily execute the JS normalizeCardName in Python, but we can ensure
# server-delivered sample names that include appended synergy annotations are not

@ -10,7 +10,7 @@ fastapi = pytest.importorskip("fastapi") # skip if FastAPI missing
def load_app_with_env(**env: str) -> types.ModuleType:
    for k, v in env.items():
        os.environ[k] = v
-    import code.web.app as app_module  # type: ignore
+    import code.web.app as app_module
    importlib.reload(app_module)
    return app_module
@ -2,7 +2,7 @@ import sys
from pathlib import Path
import pytest
from fastapi.testclient import TestClient
-from code.web.app import app  # type: ignore
+from code.web.app import app

# Ensure project root on sys.path for absolute imports
ROOT = Path(__file__).resolve().parents[2]

@ -146,7 +146,7 @@ def test_generate_theme_catalog_basic(tmp_path: Path, fixed_now: datetime) -> No
    assert all(row['last_generated_at'] == result.generated_at for row in rows)
    assert all(row['version'] == result.version for row in rows)

-    expected_hash = new_catalog._compute_version_hash([row['theme'] for row in rows])  # type: ignore[attr-defined]
+    expected_hash = new_catalog._compute_version_hash([row['theme'] for row in rows])
    assert result.version == expected_hash
@ -4,7 +4,7 @@ import os
import importlib
from pathlib import Path
from starlette.testclient import TestClient
-from code.type_definitions_theme_catalog import ThemeCatalog  # type: ignore
+from code.type_definitions_theme_catalog import ThemeCatalog

CATALOG_PATH = Path('config/themes/theme_list.json')

@ -8,7 +8,7 @@ def test_theme_list_json_validates_against_pydantic_and_fast_path():
    raw = json.loads(p.read_text(encoding='utf-8'))

    # Pydantic validation
-    from code.type_definitions_theme_catalog import ThemeCatalog  # type: ignore
+    from code.type_definitions_theme_catalog import ThemeCatalog
    catalog = ThemeCatalog(**raw)
    assert isinstance(catalog.themes, list) and len(catalog.themes) > 0
    # Basic fields exist on entries
@ -36,7 +36,7 @@ from fastapi.testclient import TestClient


def _get_app():  # local import to avoid heavy import cost if file unused
-    from code.web.app import app  # type: ignore
+    from code.web.app import app
    return app

@ -115,13 +115,13 @@ def test_preview_cache_hit_timing(monkeypatch, client):
    r1 = client.get(f"/themes/fragment/preview/{theme_id}?limit=12")
    assert r1.status_code == 200
    # Monkeypatch theme_preview._now to freeze time so second call counts as hit
-    import code.web.services.theme_preview as tp  # type: ignore
+    import code.web.services.theme_preview as tp
    orig_now = tp._now
    monkeypatch.setattr(tp, "_now", lambda: orig_now())
    r2 = client.get(f"/themes/fragment/preview/{theme_id}?limit=12")
    assert r2.status_code == 200
    # Deterministic service-level verification: second direct function call should short-circuit via cache
-    import code.web.services.theme_preview as tp  # type: ignore
+    import code.web.services.theme_preview as tp
    # Snapshot counters
    pre_hits = getattr(tp, "_PREVIEW_CACHE_HITS", 0)
    first_payload = tp.get_theme_preview(theme_id, limit=12)
@@ -16,7 +16,7 @@ def _new_client(prewarm: bool = False) -> TestClient:
     # Remove existing module (if any) so lifespan runs again
     if 'code.web.app' in list(importlib.sys.modules.keys()):
         importlib.sys.modules.pop('code.web.app')
-    from code.web.app import app  # type: ignore
+    from code.web.app import app
     return TestClient(app)

@@ -2,8 +2,8 @@ from __future__ import annotations

 import pytest

-from code.web.services.theme_preview import get_theme_preview  # type: ignore
-from code.web.services.theme_catalog_loader import load_index, slugify, project_detail  # type: ignore
+from code.web.services.theme_preview import get_theme_preview
+from code.web.services.theme_catalog_loader import load_index, slugify, project_detail


 @pytest.mark.parametrize("limit", [8, 12])

@@ -1,7 +1,7 @@
 import os
 import time
 import json
-from code.web.services.theme_preview import get_theme_preview, preview_metrics, bust_preview_cache  # type: ignore
+from code.web.services.theme_preview import get_theme_preview, preview_metrics, bust_preview_cache


 def test_colors_filter_constraint_green_subset():

@@ -47,10 +47,10 @@ class DummySpellBuilder(SpellAdditionMixin):
     def rng(self) -> DummyRNG:
         return self._rng

-    def get_theme_context(self) -> ThemeContext:  # type: ignore[override]
+    def get_theme_context(self) -> ThemeContext:
         return self._theme_context

-    def add_card(self, name: str, **kwargs: Any) -> None:  # type: ignore[override]
+    def add_card(self, name: str, **kwargs: Any) -> None:
         self.card_library[name] = {"Count": kwargs.get("count", 1)}
         self.added_cards.append(name)

@@ -20,7 +20,7 @@ def _fresh_client() -> TestClient:
     from code.web.services.commander_catalog_loader import clear_commander_catalog_cache

     clear_commander_catalog_cache()
-    from code.web.app import app  # type: ignore
+    from code.web.app import app

     client = TestClient(app)
     from code.web.services import tasks

@@ -87,7 +87,7 @@ class ThemeCatalog(BaseModel):
     def theme_names(self) -> List[str]:  # convenience
         return [t.theme for t in self.themes]

-    def model_post_init(self, __context: Any) -> None:  # type: ignore[override]
+    def model_post_init(self, __context: Any) -> None:
         # If only legacy 'provenance' provided, alias to metadata_info
         if self.metadata_info is None and self.provenance is not None:
             object.__setattr__(self, 'metadata_info', self.provenance)

@@ -135,7 +135,7 @@ class ThemeYAMLFile(BaseModel):

     model_config = ConfigDict(extra='forbid')

-    def model_post_init(self, __context: Any) -> None:  # type: ignore[override]
+    def model_post_init(self, __context: Any) -> None:
         if not self.metadata_info and self.provenance:
             object.__setattr__(self, 'metadata_info', self.provenance)
         if self.metadata_info and self.provenance:

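The legacy-field aliasing in these two models follows a common Pydantic v2 pattern. A minimal self-contained sketch (field names mirror the models above; everything else is illustrative):

from typing import Any, Optional
from pydantic import BaseModel

class Catalog(BaseModel):
    provenance: Optional[dict] = None       # legacy field
    metadata_info: Optional[dict] = None    # preferred field

    def model_post_init(self, __context: Any) -> None:
        # Alias legacy 'provenance' into 'metadata_info' when only the legacy
        # key was supplied; object.__setattr__ bypasses model validation.
        if self.metadata_info is None and self.provenance is not None:
            object.__setattr__(self, 'metadata_info', self.provenance)

print(Catalog(provenance={"source": "yaml"}).metadata_info)  # {'source': 'yaml'}
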
code/web/app.py (102 lines changed)

@@ -19,9 +19,12 @@ from contextlib import asynccontextmanager
 from code.deck_builder.summary_telemetry import get_mdfc_metrics, get_partner_metrics, get_theme_metrics
 from tagging.multi_face_merger import load_merge_summary
 from .services.combo_utils import detect_all as _detect_all
-from .services.theme_catalog_loader import prewarm_common_filters, load_index  # type: ignore
-from .services.commander_catalog_loader import load_commander_catalog  # type: ignore
-from .services.tasks import get_session, new_sid, set_session_value  # type: ignore
+from .services.theme_catalog_loader import prewarm_common_filters, load_index
+from .services.commander_catalog_loader import load_commander_catalog
+from .services.tasks import get_session, new_sid, set_session_value
+
+# Logger for app-level logging
+logger = logging.getLogger(__name__)

 # Resolve template/static dirs relative to this file
 _THIS_DIR = Path(__file__).resolve().parent

@@ -53,18 +56,18 @@ async def _lifespan(app: FastAPI):  # pragma: no cover - simple infra glue
     except Exception:
         pass
     try:
-        commanders_routes.prewarm_default_page()  # type: ignore[attr-defined]
+        commanders_routes.prewarm_default_page()
     except Exception:
         pass
     # Warm preview card index once (updated Phase A: moved to card_index module)
     try:  # local import to avoid cost if preview unused
-        from .services.card_index import maybe_build_index  # type: ignore
+        from .services.card_index import maybe_build_index
         maybe_build_index()
     except Exception:
         pass
     # Warm card browser theme catalog (fast CSV read) and theme index (slower card parsing)
     try:
-        from .routes.card_browser import get_theme_catalog, get_theme_index  # type: ignore
+        from .routes.card_browser import get_theme_catalog, get_theme_index
         get_theme_catalog()  # Fast: just reads CSV
         get_theme_index()  # Slower: parses cards for theme-to-card mapping
     except Exception:

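The best-effort try/except-pass prewarm pattern this lifespan uses is easy to lift out. A minimal generic sketch (the helper name is illustrative, not from this codebase):

from contextlib import asynccontextmanager
from fastapi import FastAPI

def _warm_caches() -> None:  # placeholder for any expensive warm-up step
    pass

@asynccontextmanager
async def lifespan(app: FastAPI):
    # Each warm-up is optional: a failure must never block startup.
    for step in (_warm_caches,):
        try:
            step()
        except Exception:
            pass
    yield  # application serves requests here

app = FastAPI(lifespan=lifespan)
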
@@ -73,7 +76,7 @@ async def _lifespan(app: FastAPI):  # pragma: no cover - simple infra glue
     try:
         from code.settings import ENABLE_CARD_DETAILS
         if ENABLE_CARD_DETAILS:
-            from .routes.card_browser import get_similarity  # type: ignore
+            from .routes.card_browser import get_similarity
             get_similarity()  # Pre-initialize singleton (one-time cost: ~2-3s)
     except Exception:
         pass

@@ -86,7 +89,7 @@ app.add_middleware(GZipMiddleware, minimum_size=500)
 # Mount static if present
 if _STATIC_DIR.exists():
     class CacheStatic(StaticFiles):
-        async def get_response(self, path, scope):  # type: ignore[override]
+        async def get_response(self, path, scope):
             resp = await super().get_response(path, scope)
             try:
                 # Add basic cache headers for static assets

@@ -99,12 +102,38 @@ if _STATIC_DIR.exists():
 # Jinja templates
 templates = Jinja2Templates(directory=str(_TEMPLATES_DIR))

+# Add custom Jinja2 filter for card image URLs
+def card_image_url(card_name: str, size: str = "normal") -> str:
+    """
+    Generate card image URL (uses local cache if available, falls back to Scryfall).
+
+    For DFC cards (containing ' // '), extracts the front face name.
+
+    Args:
+        card_name: Name of the card (may be "Front // Back" for DFCs)
+        size: Image size ('small' or 'normal')
+
+    Returns:
+        URL for the card image
+    """
+    from urllib.parse import quote
+
+    # Extract front face name for DFCs (thumbnails always show front face)
+    display_name = card_name
+    if ' // ' in card_name:
+        display_name = card_name.split(' // ')[0].strip()
+
+    # Use our API endpoint which handles cache lookup and fallback
+    return f"/api/images/{size}/{quote(display_name)}"
+
+templates.env.filters["card_image"] = card_image_url
+
 # Compatibility shim: accept legacy TemplateResponse(name, {"request": request, ...})
 # and reorder to the new signature TemplateResponse(request, name, {...}).
 # Prevents DeprecationWarning noise in tests without touching all call sites.
 _orig_template_response = templates.TemplateResponse

-def _compat_template_response(*args, **kwargs):  # type: ignore[override]
+def _compat_template_response(*args, **kwargs):
     try:
         if args and isinstance(args[0], str):
             name = args[0]

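A quick sketch of the new filter in use — a standalone Jinja2 environment registering the same function, so the behavior can be checked without booting the app (the card name is just an example):

from jinja2 import Environment
from urllib.parse import quote

def card_image_url(card_name: str, size: str = "normal") -> str:
    # Same logic as the filter above: DFC names collapse to the front face.
    display_name = card_name.split(' // ')[0].strip() if ' // ' in card_name else card_name
    return f"/api/images/{size}/{quote(display_name)}"

env = Environment()
env.filters["card_image"] = card_image_url

tmpl = env.from_string('<img src="{{ name | card_image("small") }}">')
print(tmpl.render(name="Fire // Ice"))  # <img src="/api/images/small/Fire">
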
@@ -122,7 +151,7 @@ def _compat_template_response(*args, **kwargs):  # type: ignore[override]
         pass
     return _orig_template_response(*args, **kwargs)

-templates.TemplateResponse = _compat_template_response  # type: ignore[assignment]
+templates.TemplateResponse = _compat_template_response

 # (Startup prewarm moved to lifespan handler _lifespan)

@@ -298,7 +327,7 @@ templates.env.globals.update({
 # Expose catalog hash (for cache versioning / service worker) – best-effort, fallback to 'dev'
 def _load_catalog_hash() -> str:
     try:  # local import to avoid circular on early load
-        from .services.theme_catalog_loader import CATALOG_JSON  # type: ignore
+        from .services.theme_catalog_loader import CATALOG_JSON
         if CATALOG_JSON.exists():
             raw = _json.loads(CATALOG_JSON.read_text(encoding="utf-8") or "{}")
             meta = raw.get("metadata_info") or {}

@@ -840,6 +869,12 @@ async def home(request: Request) -> HTMLResponse:
     return templates.TemplateResponse("home.html", {"request": request, "version": os.getenv("APP_VERSION", "dev")})


+@app.get("/docs/components", response_class=HTMLResponse)
+async def components_library(request: Request) -> HTMLResponse:
+    """M2 Component Library - showcase of standardized UI components"""
+    return templates.TemplateResponse("docs/components.html", {"request": request})
+
+
 # Simple health check (hardened)
 @app.get("/healthz")
 async def healthz():

@@ -916,7 +951,7 @@ async def status_random_theme_stats():
     if not SHOW_DIAGNOSTICS:
         raise HTTPException(status_code=404, detail="Not Found")
     try:
-        from deck_builder.random_entrypoint import get_theme_tag_stats  # type: ignore
+        from deck_builder.random_entrypoint import get_theme_tag_stats

         stats = get_theme_tag_stats()
         return JSONResponse({"ok": True, "stats": stats})

@@ -1003,8 +1038,8 @@ async def api_random_build(request: Request):
     except Exception:
         timeout_s = max(0.1, float(RANDOM_TIMEOUT_MS) / 1000.0)
     # Import on-demand to avoid heavy costs at module import time
-    from deck_builder.random_entrypoint import build_random_deck, RandomConstraintsImpossibleError  # type: ignore
-    from deck_builder.random_entrypoint import RandomThemeNoMatchError  # type: ignore
+    from deck_builder.random_entrypoint import build_random_deck, RandomConstraintsImpossibleError
+    from deck_builder.random_entrypoint import RandomThemeNoMatchError

     res = build_random_deck(
         theme=theme,

@@ -1135,7 +1170,7 @@ async def api_random_full_build(request: Request):
     timeout_s = max(0.1, float(RANDOM_TIMEOUT_MS) / 1000.0)

     # Build a full deck deterministically
-    from deck_builder.random_entrypoint import build_random_full_deck, RandomConstraintsImpossibleError  # type: ignore
+    from deck_builder.random_entrypoint import build_random_full_deck, RandomConstraintsImpossibleError
     res = build_random_full_deck(
         theme=theme,
         constraints=constraints,

@@ -1359,7 +1394,7 @@ async def api_random_reroll(request: Request):
     except Exception:
         new_seed = None
     if new_seed is None:
-        from random_util import generate_seed  # type: ignore
+        from random_util import generate_seed
         new_seed = int(generate_seed())

     # Build with the new seed

@@ -1370,7 +1405,7 @@ async def api_random_reroll(request: Request):
     timeout_s = max(0.1, float(RANDOM_TIMEOUT_MS) / 1000.0)
     attempts = body.get("attempts", int(RANDOM_MAX_ATTEMPTS))

-    from deck_builder.random_entrypoint import build_random_full_deck  # type: ignore
+    from deck_builder.random_entrypoint import build_random_full_deck
     res = build_random_full_deck(
         theme=theme,
         constraints=constraints,

@@ -1751,10 +1786,10 @@ async def hx_random_reroll(request: Request):
     except Exception:
         new_seed = None
     if new_seed is None:
-        from random_util import generate_seed  # type: ignore
+        from random_util import generate_seed
         new_seed = int(generate_seed())
     # Import outside conditional to avoid UnboundLocalError when branch not taken
-    from deck_builder.random_entrypoint import build_random_full_deck  # type: ignore
+    from deck_builder.random_entrypoint import build_random_full_deck
     try:
         t0 = time.time()
         _attempts = int(attempts_override) if attempts_override is not None else int(RANDOM_MAX_ATTEMPTS)

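The UnboundLocalError the comment guards against is easy to reproduce in isolation; a minimal sketch with a stand-in module:

def demo(flag: bool):
    if flag:
        import json  # 'json' becomes a local name for the whole function body
    return json.dumps({})

demo(True)   # OK: returns '{}'
demo(False)  # UnboundLocalError: the local 'json' was never bound

Hoisting the import above the conditional, as the diff does, makes the name available on every path.
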
@@ -1765,7 +1800,7 @@ async def hx_random_reroll(request: Request):
     _timeout_s = max(0.1, float(_timeout_ms) / 1000.0)
     if is_reroll_same:
         build_t0 = time.time()
-        from headless_runner import run as _run  # type: ignore
+        from headless_runner import run as _run
         # Suppress builder's internal initial export to control artifact generation (matches full random path logic)
         try:
             import os as _os

@@ -1778,18 +1813,18 @@ async def hx_random_reroll(request: Request):
         summary = None
         try:
             if hasattr(builder, 'build_deck_summary'):
-                summary = builder.build_deck_summary()  # type: ignore[attr-defined]
+                summary = builder.build_deck_summary()
         except Exception:
             summary = None
         decklist = []
         try:
             if hasattr(builder, 'deck_list_final'):
-                decklist = getattr(builder, 'deck_list_final')  # type: ignore[attr-defined]
+                decklist = getattr(builder, 'deck_list_final')
         except Exception:
             decklist = []
         # Controlled artifact export (single pass)
-        csv_path = getattr(builder, 'last_csv_path', None)  # type: ignore[attr-defined]
-        txt_path = getattr(builder, 'last_txt_path', None)  # type: ignore[attr-defined]
+        csv_path = getattr(builder, 'last_csv_path', None)
+        txt_path = getattr(builder, 'last_txt_path', None)
         compliance = None
         try:
             import os as _os

@@ -1797,7 +1832,7 @@ async def hx_random_reroll(request: Request):
         # Perform exactly one export sequence now
         if not csv_path and hasattr(builder, 'export_decklist_csv'):
             try:
-                csv_path = builder.export_decklist_csv()  # type: ignore[attr-defined]
+                csv_path = builder.export_decklist_csv()
             except Exception:
                 csv_path = None
         if csv_path and isinstance(csv_path, str):

@@ -1807,7 +1842,7 @@ async def hx_random_reroll(request: Request):
             try:
                 base_name = _os.path.basename(base_path) + '.txt'
                 if hasattr(builder, 'export_decklist_text'):
-                    txt_path = builder.export_decklist_text(filename=base_name)  # type: ignore[attr-defined]
+                    txt_path = builder.export_decklist_text(filename=base_name)
             except Exception:
                 # Fallback: if a txt already exists from a prior build reuse it
                 if _os.path.isfile(base_path + '.txt'):

|
|||
else:
|
||||
try:
|
||||
if hasattr(builder, 'compute_and_print_compliance'):
|
||||
compliance = builder.compute_and_print_compliance(base_stem=_os.path.basename(base_path)) # type: ignore[attr-defined]
|
||||
compliance = builder.compute_and_print_compliance(base_stem=_os.path.basename(base_path))
|
||||
except Exception:
|
||||
compliance = None
|
||||
if summary:
|
||||
|
|
@ -2016,7 +2051,7 @@ async def hx_random_reroll(request: Request):
|
|||
except Exception:
|
||||
_permalink = None
|
||||
resp = templates.TemplateResponse(
|
||||
"partials/random_result.html", # type: ignore
|
||||
"partials/random_result.html",
|
||||
{
|
||||
"request": request,
|
||||
"seed": int(res.seed),
|
||||
|
|
@ -2212,6 +2247,13 @@ async def setup_status():
|
|||
return JSONResponse({"running": False, "phase": "error"})
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Card Image Serving Endpoint - MOVED TO /routes/api.py
|
||||
# ============================================================================
|
||||
# Image serving logic has been moved to code/web/routes/api.py
|
||||
# The router is included below via: app.include_router(api_routes.router)
|
||||
|
||||
|
||||
# Routers
|
||||
from .routes import build as build_routes # noqa: E402
|
||||
from .routes import configs as config_routes # noqa: E402
|
||||
|
|
@ -2225,6 +2267,7 @@ from .routes import telemetry as telemetry_routes # noqa: E402
|
|||
from .routes import cards as cards_routes # noqa: E402
|
||||
from .routes import card_browser as card_browser_routes # noqa: E402
|
||||
from .routes import compare as compare_routes # noqa: E402
|
||||
from .routes import api as api_routes # noqa: E402
|
||||
app.include_router(build_routes.router)
|
||||
app.include_router(config_routes.router)
|
||||
app.include_router(decks_routes.router)
|
||||
|
|
@ -2237,6 +2280,7 @@ app.include_router(telemetry_routes.router)
|
|||
app.include_router(cards_routes.router)
|
||||
app.include_router(card_browser_routes.router)
|
||||
app.include_router(compare_routes.router)
|
||||
app.include_router(api_routes.router)
|
||||
|
||||
# Warm validation cache early to reduce first-call latency in tests and dev
|
||||
try:
|
||||
|
|
@ -2423,7 +2467,7 @@ async def logs_page(
|
|||
# Respect feature flag
|
||||
raise HTTPException(status_code=404, detail="Not Found")
|
||||
# Reuse status_logs logic
|
||||
data = await status_logs(tail=tail, q=q, level=level) # type: ignore[arg-type]
|
||||
data = await status_logs(tail=tail, q=q, level=level)
|
||||
lines: list[str]
|
||||
if isinstance(data, JSONResponse):
|
||||
payload = data.body
|
||||
|
|
|
|||
code/web/routes/api.py (new file, 299 lines)

@@ -0,0 +1,299 @@
+"""API endpoints for web services."""
+
+from __future__ import annotations
+
+import logging
+import threading
+from pathlib import Path
+from urllib.parse import quote_plus
+
+from fastapi import APIRouter, Query
+from fastapi.responses import FileResponse, JSONResponse, RedirectResponse
+
+from code.file_setup.image_cache import ImageCache
+
+logger = logging.getLogger(__name__)
+
+router = APIRouter(prefix="/api")
+
+# Global image cache instance
+_image_cache = ImageCache()
+
+
+@router.get("/images/status")
+async def get_download_status():
+    """
+    Get current image download status.
+
+    Returns:
+        JSON response with download status
+    """
+    import json
+
+    status_file = Path("card_files/images/.download_status.json")
+
+    if not status_file.exists():
+        # Check cache statistics if no download in progress
+        stats = _image_cache.cache_statistics()
+        return JSONResponse({
+            "running": False,
+            "stats": stats
+        })
+
+    try:
+        with status_file.open('r', encoding='utf-8') as f:
+            status = json.load(f)
+        return JSONResponse(status)
+    except Exception as e:
+        logger.warning(f"Could not read status file: {e}")
+        return JSONResponse({
+            "running": False,
+            "error": str(e)
+        })
+
+
+@router.get("/images/debug")
+async def get_image_debug():
+    """
+    Debug endpoint to check image cache configuration.
+
+    Returns:
+        JSON with debug information
+    """
+    import os
+    from pathlib import Path
+
+    base_dir = Path(_image_cache.base_dir)
+
+    debug_info = {
+        "cache_enabled": _image_cache.is_enabled(),
+        "env_var": os.getenv("CACHE_CARD_IMAGES", "not set"),
+        "base_dir": str(base_dir),
+        "base_dir_exists": base_dir.exists(),
+        "small_dir": str(base_dir / "small"),
+        "small_dir_exists": (base_dir / "small").exists(),
+        "normal_dir": str(base_dir / "normal"),
+        "normal_dir_exists": (base_dir / "normal").exists(),
+    }
+
+    # Count files if directories exist
+    if (base_dir / "small").exists():
+        debug_info["small_count"] = len(list((base_dir / "small").glob("*.jpg")))
+    if (base_dir / "normal").exists():
+        debug_info["normal_count"] = len(list((base_dir / "normal").glob("*.jpg")))
+
+    # Test with a sample card name
+    test_card = "Lightning Bolt"
+    debug_info["test_card"] = test_card
+    test_path_small = _image_cache.get_image_path(test_card, "small")
+    test_path_normal = _image_cache.get_image_path(test_card, "normal")
+    debug_info["test_path_small"] = str(test_path_small) if test_path_small else None
+    debug_info["test_path_normal"] = str(test_path_normal) if test_path_normal else None
+    debug_info["test_exists_small"] = test_path_small.exists() if test_path_small else False
+    debug_info["test_exists_normal"] = test_path_normal.exists() if test_path_normal else False
+
+    return JSONResponse(debug_info)
+
+
+@router.get("/images/{size}/{card_name}")
+async def get_card_image(size: str, card_name: str, face: str = Query(default="front")):
+    """
+    Serve card image from cache or redirect to Scryfall API.
+
+    Args:
+        size: Image size ('small' or 'normal')
+        card_name: Name of the card
+        face: Which face to show ('front' or 'back') for DFC cards
+
+    Returns:
+        FileResponse if cached locally, RedirectResponse to Scryfall API otherwise
+    """
+    # Validate size parameter
+    if size not in ["small", "normal"]:
+        size = "normal"
+
+    # Check if caching is enabled
+    cache_enabled = _image_cache.is_enabled()
+
+    # Check if image exists in cache
+    if cache_enabled:
+        image_path = None
+
+        # For DFC cards, handle front/back faces differently
+        if " // " in card_name:
+            if face == "back":
+                # For back face, ONLY try the back face name
+                back_face = card_name.split(" // ")[1].strip()
+                logger.debug(f"DFC back face requested: {back_face}")
+                image_path = _image_cache.get_image_path(back_face, size)
+            else:
+                # For front face (or unspecified), try front face name
+                front_face = card_name.split(" // ")[0].strip()
+                logger.debug(f"DFC front face requested: {front_face}")
+                image_path = _image_cache.get_image_path(front_face, size)
+        else:
+            # Single-faced card, try exact name
+            image_path = _image_cache.get_image_path(card_name, size)
+
+        if image_path and image_path.exists():
+            logger.info(f"Serving cached image: {card_name} ({size}, {face})")
+            return FileResponse(
+                image_path,
+                media_type="image/jpeg",
+                headers={
+                    "Cache-Control": "public, max-age=31536000",  # 1 year
+                }
+            )
+        else:
+            logger.debug(f"No cached image found for: {card_name} (face: {face})")
+
+    # Fallback to Scryfall API
+    # For back face requests of DFC cards, we need the full card name
+    scryfall_card_name = card_name
+    scryfall_params = f"fuzzy={quote_plus(scryfall_card_name)}&format=image&version={size}"
+
+    # If this is a back face request, try to find the full DFC name
+    if face == "back":
+        try:
+            from code.services.all_cards_loader import AllCardsLoader
+            loader = AllCardsLoader()
+            df = loader.load()
+
+            # Look for cards where this face name appears in the card_faces
+            # The card name format is "Front // Back"
+            matching = df[df['name'].str.contains(card_name, case=False, na=False, regex=False)]
+            if not matching.empty:
+                # Find DFC cards (containing ' // ')
+                dfc_matches = matching[matching['name'].str.contains(' // ', na=False, regex=False)]
+                if not dfc_matches.empty:
+                    # Use the first matching DFC card's full name
+                    full_name = dfc_matches.iloc[0]['name']
+                    scryfall_card_name = full_name
+                    # Add face parameter to Scryfall request
+                    scryfall_params = f"exact={quote_plus(full_name)}&format=image&version={size}&face=back"
+        except Exception as e:
+            logger.warning(f"Could not lookup full card name for back face '{card_name}': {e}")
+
+    scryfall_url = f"https://api.scryfall.com/cards/named?{scryfall_params}"
+    return RedirectResponse(scryfall_url)
+
+
+@router.post("/images/download")
+async def download_images():
+    """
+    Start downloading card images in background.
+
+    Returns:
+        JSON response with status
+    """
+    if not _image_cache.is_enabled():
+        return JSONResponse({
+            "ok": False,
+            "message": "Image caching is disabled. Set CACHE_CARD_IMAGES=1 to enable."
+        }, status_code=400)
+
+    # Write initial status
+    try:
+        status_dir = Path("card_files/images")
+        status_dir.mkdir(parents=True, exist_ok=True)
+        status_file = status_dir / ".download_status.json"
+
+        import json
+        with status_file.open('w', encoding='utf-8') as f:
+            json.dump({
+                "running": True,
+                "phase": "bulk_data",
+                "message": "Downloading Scryfall bulk data...",
+                "current": 0,
+                "total": 0,
+                "percentage": 0
+            }, f)
+    except Exception as e:
+        logger.warning(f"Could not write initial status: {e}")
+
+    # Start download in background thread
+    def _download_task():
+        import json
+        status_file = Path("card_files/images/.download_status.json")
+
+        try:
+            # Download bulk data first
+            logger.info("[IMAGE DOWNLOAD] Starting bulk data download...")
+
+            def bulk_progress(downloaded: int, total: int):
+                """Progress callback for bulk data download."""
+                try:
+                    percentage = int(downloaded / total * 100) if total > 0 else 0
+                    with status_file.open('w', encoding='utf-8') as f:
+                        json.dump({
+                            "running": True,
+                            "phase": "bulk_data",
+                            "message": f"Downloading bulk data: {percentage}%",
+                            "current": downloaded,
+                            "total": total,
+                            "percentage": percentage
+                        }, f)
+                except Exception as e:
+                    logger.warning(f"Could not update bulk progress: {e}")
+
+            _image_cache.download_bulk_data(progress_callback=bulk_progress)
+
+            # Download images
+            logger.info("[IMAGE DOWNLOAD] Starting image downloads...")
+
+            def image_progress(current: int, total: int, card_name: str):
+                """Progress callback for image downloads."""
+                try:
+                    percentage = int(current / total * 100) if total > 0 else 0
+                    with status_file.open('w', encoding='utf-8') as f:
+                        json.dump({
+                            "running": True,
+                            "phase": "images",
+                            "message": f"Downloading images: {card_name}",
+                            "current": current,
+                            "total": total,
+                            "percentage": percentage
+                        }, f)
+
+                    # Log progress every 100 cards
+                    if current % 100 == 0:
+                        logger.info(f"[IMAGE DOWNLOAD] Progress: {current}/{total} ({percentage}%)")
+
+                except Exception as e:
+                    logger.warning(f"Could not update image progress: {e}")
+
+            stats = _image_cache.download_images(progress_callback=image_progress)
+
+            # Write completion status
+            with status_file.open('w', encoding='utf-8') as f:
+                json.dump({
+                    "running": False,
+                    "phase": "complete",
+                    "message": f"Download complete: {stats.get('downloaded', 0)} new images",
+                    "stats": stats,
+                    "percentage": 100
+                }, f)
+
+            logger.info(f"[IMAGE DOWNLOAD] Complete: {stats}")
+
+        except Exception as e:
+            logger.error(f"[IMAGE DOWNLOAD] Failed: {e}", exc_info=True)
+            try:
+                with status_file.open('w', encoding='utf-8') as f:
+                    json.dump({
+                        "running": False,
+                        "phase": "error",
+                        "message": f"Download failed: {str(e)}",
+                        "percentage": 0
+                    }, f)
+            except Exception:
+                pass
+
+    # Start background thread
+    thread = threading.Thread(target=_download_task, daemon=True)
+    thread.start()
+
+    return JSONResponse({
+        "ok": True,
+        "message": "Image download started in background"
+    }, status_code=202)

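A minimal sketch of exercising these endpoints with the app's TestClient (imports follow the test files earlier in this diff; the card name is just an example, and a cache miss simply yields a redirect to Scryfall):

from fastapi.testclient import TestClient
from code.web.app import app

client = TestClient(app)

# Kick off the background download (202 if enabled, 400 if CACHE_CARD_IMAGES=0)
print(client.post("/api/images/download").json())

# Poll progress; with no download running this returns cache statistics instead
print(client.get("/api/images/status").json())

# Fetch an image; follow_redirects=False exposes the Scryfall fallback redirect
resp = client.get("/api/images/normal/Lightning Bolt", follow_redirects=False)
print(resp.status_code)  # 200 from cache, or 307 redirect to api.scryfall.com
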
@@ -25,11 +25,12 @@ from ..services.build_utils import (
     owned_set as owned_set_helper,
     builder_present_names,
     builder_display_map,
+    commander_hover_context,
 )
 from ..app import templates
 from deck_builder import builder_constants as bc
 from ..services import orchestrator as orch
-from ..services.orchestrator import is_setup_ready as _is_setup_ready, is_setup_stale as _is_setup_stale  # type: ignore
+from ..services.orchestrator import is_setup_ready as _is_setup_ready, is_setup_stale as _is_setup_stale
 from ..services.build_utils import owned_names as owned_names_helper
 from ..services.tasks import get_session, new_sid
 from html import escape as _esc

@@ -118,7 +119,7 @@ def _available_cards_normalized() -> tuple[set[str], dict[str, str]]:
         from deck_builder.include_exclude_utils import normalize_punctuation
     except Exception:
         # Fallback: identity normalization
-        def normalize_punctuation(x: str) -> str:  # type: ignore
+        def normalize_punctuation(x: str) -> str:
             return str(x).strip().casefold()
     norm_map: dict[str, str] = {}
     for name in names:

@@ -469,7 +470,7 @@ def _background_options_from_commander_catalog() -> list[dict[str, Any]]:

     seen: set[str] = set()
     options: list[dict[str, Any]] = []
-    for record in getattr(catalog, "entries", ()):  # type: ignore[attr-defined]
+    for record in getattr(catalog, "entries", ()):
         if not getattr(record, "is_background", False):
             continue
         name = getattr(record, "display_name", None)

@@ -1107,6 +1108,8 @@ async def build_index(request: Request) -> HTMLResponse:
         if q_commander:
             # Persist a human-friendly commander name into session for the wizard
             sess["commander"] = str(q_commander)
+            # Set flag to indicate this is a quick-build scenario
+            sess["quick_build"] = True
     except Exception:
         pass
     return_url = None

@@ -1146,12 +1149,17 @@ async def build_index(request: Request) -> HTMLResponse:
         last_step = 2
     else:
         last_step = 1
+    # Only pass commander to template if coming from commander browser (?commander= query param)
+    # This prevents stale commander from being pre-filled on subsequent builds
+    # The query param only exists on initial navigation from commander browser
+    should_auto_fill = q_commander is not None
+
     resp = templates.TemplateResponse(
         request,
         "build/index.html",
         {
             "sid": sid,
-            "commander": sess.get("commander"),
+            "commander": sess.get("commander") if should_auto_fill else None,
             "tags": sess.get("tags", []),
             "name": sess.get("custom_export_base"),
             "last_step": last_step,

@@ -1349,6 +1357,19 @@ async def build_new_modal(request: Request) -> HTMLResponse:
     for key in skip_keys:
         sess.pop(key, None)

+    # M2: Check if this is a quick-build scenario (from commander browser)
+    # Use the quick_build flag set by /build route when ?commander= param present
+    is_quick_build = sess.pop("quick_build", False)  # Pop to consume the flag
+
+    # M2: Clear commander and form selections for fresh start (unless quick build)
+    if not is_quick_build:
+        commander_keys = [
+            "commander", "partner", "background", "commander_mode",
+            "themes", "bracket"
+        ]
+        for key in commander_keys:
+            sess.pop(key, None)
+
     theme_context = _custom_theme_context(request, sess)
     ctx = {
         "request": request,

|
@ -1361,6 +1382,7 @@ async def build_new_modal(request: Request) -> HTMLResponse:
|
|||
"enable_batch_build": ENABLE_BATCH_BUILD,
|
||||
"ideals_ui_mode": WEB_IDEALS_UI, # 'input' or 'slider'
|
||||
"form": {
|
||||
"commander": sess.get("commander", ""), # Pre-fill for quick-build
|
||||
"prefer_combos": bool(sess.get("prefer_combos")),
|
||||
"combo_count": sess.get("combo_target_count"),
|
||||
"combo_balance": sess.get("combo_balance"),
|
||||
|
|
@@ -1483,20 +1505,14 @@ async def build_new_inspect(request: Request, name: str = Query(...)) -> HTMLRes
             merged_tags.append(token)
         ctx["tags"] = merged_tags

+        # Deduplicate recommended: remove any that are already in partner_tags
+        partner_tags_lower = {str(tag).strip().casefold() for tag in partner_tags}
         existing_recommended = ctx.get("recommended") or []
-        merged_recommended: list[str] = []
-        rec_seen: set[str] = set()
-        for source in (partner_tags, existing_recommended):
-            for tag in source:
-                token = str(tag).strip()
-                if not token:
-                    continue
-                key = token.casefold()
-                if key in rec_seen:
-                    continue
-                rec_seen.add(key)
-                merged_recommended.append(token)
-        ctx["recommended"] = merged_recommended
+        deduplicated_recommended = [
+            tag for tag in existing_recommended
+            if str(tag).strip().casefold() not in partner_tags_lower
+        ]
+        ctx["recommended"] = deduplicated_recommended

         reason_map = dict(ctx.get("recommended_reasons") or {})
         for tag in partner_tags:

|
@ -2849,7 +2865,7 @@ async def build_step5_rewind(request: Request, to: str = Form(...)) -> HTMLRespo
|
|||
snap = h.get("snapshot")
|
||||
break
|
||||
if snap is not None:
|
||||
orch._restore_builder(ctx["builder"], snap) # type: ignore[attr-defined]
|
||||
orch._restore_builder(ctx["builder"], snap)
|
||||
ctx["idx"] = int(target_i) - 1
|
||||
ctx["last_visible_idx"] = int(target_i) - 1
|
||||
except Exception:
|
||||
|
|
@@ -2907,6 +2923,11 @@ async def build_step2_get(request: Request) -> HTMLResponse:
     if is_gc and (sel_br is None or int(sel_br) < 3):
         sel_br = 3
     partner_enabled = bool(sess.get("partner_enabled") and ENABLE_PARTNER_MECHANICS)
+
+    import logging
+    logger = logging.getLogger(__name__)
+    logger.info(f"Step2 GET: commander={commander}, partner_enabled={partner_enabled}, secondary={sess.get('secondary_commander')}")
+
     context = {
         "request": request,
         "commander": {"name": commander},

@@ -2940,7 +2961,22 @@ async def build_step2_get(request: Request) -> HTMLResponse:
     )
     partner_tags = context.pop("partner_theme_tags", None)
     if partner_tags:
+        import logging
+        logger = logging.getLogger(__name__)
         context["tags"] = partner_tags
+        # Deduplicate recommended tags: remove any that are already in partner_tags
+        partner_tags_lower = {str(tag).strip().casefold() for tag in partner_tags}
+        original_recommended = context.get("recommended", [])
+        deduplicated_recommended = [
+            tag for tag in original_recommended
+            if str(tag).strip().casefold() not in partner_tags_lower
+        ]
+        logger.info(
+            f"Step2: partner_tags={len(partner_tags)}, "
+            f"original_recommended={len(original_recommended)}, "
+            f"deduplicated_recommended={len(deduplicated_recommended)}"
+        )
+        context["recommended"] = deduplicated_recommended
     resp = templates.TemplateResponse("build/_step2.html", context)
     resp.set_cookie("sid", sid, httponly=True, samesite="lax")
     return resp

@@ -3266,6 +3302,57 @@ async def build_step3_get(request: Request) -> HTMLResponse:
     sess["last_step"] = 3
     defaults = orch.ideal_defaults()
     values = sess.get("ideals") or defaults
+
+    # Check if any skip flags are enabled to show skeleton automation page
+    skip_flags = {
+        "skip_lands": "land selection",
+        "skip_to_misc": "land selection",
+        "skip_basics": "basic lands",
+        "skip_staples": "staple lands",
+        "skip_kindred": "kindred lands",
+        "skip_fetches": "fetch lands",
+        "skip_duals": "dual lands",
+        "skip_triomes": "triome lands",
+        "skip_all_creatures": "creature selection",
+        "skip_creature_primary": "primary creatures",
+        "skip_creature_secondary": "secondary creatures",
+        "skip_creature_fill": "creature fills",
+        "skip_all_spells": "spell selection",
+        "skip_ramp": "ramp spells",
+        "skip_removal": "removal spells",
+        "skip_wipes": "board wipes",
+        "skip_card_advantage": "card advantage spells",
+        "skip_protection": "protection spells",
+        "skip_spell_fill": "spell fills",
+    }
+
+    active_skips = [desc for key, desc in skip_flags.items() if sess.get(key, False)]
+
+    if active_skips:
+        # Show skeleton automation page with auto-submit
+        automation_parts = []
+        if any("land" in s for s in active_skips):
+            automation_parts.append("lands")
+        if any("creature" in s for s in active_skips):
+            automation_parts.append("creatures")
+        if any("spell" in s for s in active_skips):
+            automation_parts.append("spells")
+
+        automation_message = f"Applying default values for {', '.join(automation_parts)}..."
+
+        resp = templates.TemplateResponse(
+            "build/_step3_skeleton.html",
+            {
+                "request": request,
+                "defaults": defaults,
+                "commander": sess.get("commander"),
+                "automation_message": automation_message,
+            },
+        )
+        resp.set_cookie("sid", sid, httponly=True, samesite="lax")
+        return resp
+
+    # No skips enabled, show normal form
     resp = templates.TemplateResponse(
         "build/_step3.html",
         {

@@ -3782,7 +3869,7 @@ async def build_step5_reset_stage(request: Request) -> HTMLResponse:
     if not ctx or not ctx.get("snapshot"):
         return await build_step5_get(request)
     try:
-        orch._restore_builder(ctx["builder"], ctx["snapshot"])  # type: ignore[attr-defined]
+        orch._restore_builder(ctx["builder"], ctx["snapshot"])
     except Exception:
         return await build_step5_get(request)
     # Re-render step 5 with cleared added list

@@ -3844,6 +3931,16 @@ async def build_step5_summary(request: Request, token: int = Query(0)) -> HTMLRe
     ctx["synergies"] = synergies
     ctx["summary_ready"] = True
     ctx["summary_token"] = active_token
+
+    # Add commander hover context for color identity and theme tags
+    hover_meta = commander_hover_context(
+        commander_name=ctx.get("commander"),
+        deck_tags=sess.get("tags"),
+        summary=summary_data,
+        combined=ctx.get("combined_commander"),
+    )
+    ctx.update(hover_meta)
+
     response = templates.TemplateResponse("partials/deck_summary.html", ctx)
     response.set_cookie("sid", sid, httponly=True, samesite="lax")
     return response

@@ -4196,7 +4293,7 @@ async def build_alternatives(
         try:
             if rng is not None:
                 return rng.sample(seq, limit) if len(seq) >= limit else list(seq)
-            import random as _rnd  # type: ignore
+            import random as _rnd
             return _rnd.sample(seq, limit) if len(seq) >= limit else list(seq)
         except Exception:
             return list(seq[:limit])

@@ -4247,7 +4344,7 @@ async def build_alternatives(
     # Helper: map display names
     def _display_map_for(lower_pool: set[str]) -> dict[str, str]:
         try:
-            return builder_display_map(b, lower_pool)  # type: ignore[arg-type]
+            return builder_display_map(b, lower_pool)
         except Exception:
             return {nm: nm for nm in lower_pool}

@@ -4425,7 +4522,7 @@ async def build_alternatives(
         pass
     # Sort by priority like the builder
     try:
-        pool = bu.sort_by_priority(pool, ["edhrecRank","manaValue"])  # type: ignore[arg-type]
+        pool = bu.sort_by_priority(pool, ["edhrecRank","manaValue"])
     except Exception:
         pass
     # Exclusions and ownership (for non-random roles this stays before slicing)

@@ -4923,13 +5020,13 @@ async def build_compliance_panel(request: Request) -> HTMLResponse:
     comp = None
     try:
         if hasattr(b, 'compute_and_print_compliance'):
-            comp = b.compute_and_print_compliance(base_stem=None)  # type: ignore[attr-defined]
+            comp = b.compute_and_print_compliance(base_stem=None)
     except Exception:
         comp = None
     try:
         if comp:
             from ..services import orchestrator as orch
-            comp = orch._attach_enforcement_plan(b, comp)  # type: ignore[attr-defined]
+            comp = orch._attach_enforcement_plan(b, comp)
     except Exception:
         pass
     if not comp:

@@ -5054,11 +5151,11 @@ async def build_enforce_apply(request: Request) -> HTMLResponse:
     # If missing, export once to establish base
     if not base_stem:
         try:
-            ctx["csv_path"] = b.export_decklist_csv()  # type: ignore[attr-defined]
+            ctx["csv_path"] = b.export_decklist_csv()
             import os as _os
             base_stem = _os.path.splitext(_os.path.basename(ctx["csv_path"]))[0]
             # Also produce a text export for completeness
-            ctx["txt_path"] = b.export_decklist_text(filename=base_stem + '.txt')  # type: ignore[attr-defined]
+            ctx["txt_path"] = b.export_decklist_text(filename=base_stem + '.txt')
         except Exception:
             base_stem = None
     # Add lock placeholders into the library before enforcement so user choices are present

@@ -5103,7 +5200,7 @@ async def build_enforce_apply(request: Request) -> HTMLResponse:
         pass
     # Run enforcement + re-exports (tops up to 100 internally)
     try:
-        rep = b.enforce_and_reexport(base_stem=base_stem, mode='auto')  # type: ignore[attr-defined]
+        rep = b.enforce_and_reexport(base_stem=base_stem, mode='auto')
     except Exception as e:
         err_ctx = step5_error_ctx(request, sess, f"Enforcement failed: {e}")
         resp = templates.TemplateResponse("build/_step5.html", err_ctx)

@@ -5177,13 +5274,13 @@ async def build_enforcement_fullpage(request: Request) -> HTMLResponse:
     comp = None
     try:
         if hasattr(b, 'compute_and_print_compliance'):
-            comp = b.compute_and_print_compliance(base_stem=None)  # type: ignore[attr-defined]
+            comp = b.compute_and_print_compliance(base_stem=None)
     except Exception:
         comp = None
     try:
         if comp:
             from ..services import orchestrator as orch
-            comp = orch._attach_enforcement_plan(b, comp)  # type: ignore[attr-defined]
+            comp = orch._attach_enforcement_plan(b, comp)
     except Exception:
         pass
     try:

@@ -425,7 +425,7 @@ async def decks_compare(request: Request, A: Optional[str] = None, B: Optional[s
             mt_val = str(int(mt))
         except Exception:
             mt_val = "0"
-        options.append({"name": it.get("name"), "label": label, "mtime": mt_val})  # type: ignore[arg-type]
+        options.append({"name": it.get("name"), "label": label, "mtime": mt_val})

     diffs = None
     metaA: Dict[str, str] = {}

@@ -7,7 +7,7 @@ from pathlib import Path
 import json as _json
 from fastapi.responses import HTMLResponse, JSONResponse
 from ..app import templates
-from ..services.orchestrator import _ensure_setup_ready  # type: ignore
+from ..services.orchestrator import _ensure_setup_ready

 router = APIRouter(prefix="/setup")

@@ -21,7 +21,7 @@ def _kickoff_setup_async(force: bool = False):
     def runner():
         try:
             print(f"[SETUP THREAD] Starting setup/tagging (force={force})...")
-            _ensure_setup_ready(print, force=force)  # type: ignore[arg-type]
+            _ensure_setup_ready(print, force=force)
             print("[SETUP THREAD] Setup/tagging completed successfully")
         except Exception as e:  # pragma: no cover - background best effort
             try:

@@ -36,7 +36,7 @@ def _kickoff_setup_async(force: bool = False):


 @router.get("/running", response_class=HTMLResponse)
-async def setup_running(request: Request, start: Optional[int] = 0, next: Optional[str] = None, force: Optional[bool] = None) -> HTMLResponse:  # type: ignore[override]
+async def setup_running(request: Request, start: Optional[int] = 0, next: Optional[str] = None, force: Optional[bool] = None) -> HTMLResponse:
     # Optionally start the setup/tagging in the background if requested
     try:
         if start and int(start) != 0:

@@ -195,7 +195,11 @@ async def download_github():
 @router.get("/", response_class=HTMLResponse)
 async def setup_index(request: Request) -> HTMLResponse:
     import code.settings as settings
+    from code.file_setup.image_cache import ImageCache
+
+    image_cache = ImageCache()
     return templates.TemplateResponse("setup/index.html", {
         "request": request,
-        "similarity_enabled": settings.ENABLE_CARD_SIMILARITIES
+        "similarity_enabled": settings.ENABLE_CARD_SIMILARITIES,
+        "image_cache_enabled": image_cache.is_enabled()
     })

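ImageCache.is_enabled() presumably keys off the CACHE_CARD_IMAGES env var documented in the .env changes at the top of this PR; a minimal sketch of that kind of gate, stated as an assumption about the implementation (the truthy-value set mirrors the WEB_TAG_PARALLEL parsing in the orchestrator diff below):

import os

def _env_flag(name: str, default: str = "0") -> bool:
    # Hypothetical helper: accepts the usual truthy spellings.
    return str(os.getenv(name, default)).strip().lower() in {"1", "true", "yes", "on"}

CACHE_ENABLED = _env_flag("CACHE_CARD_IMAGES")
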
|
|||
|
|
@ -7,7 +7,7 @@ from typing import Optional, Dict, Any
|
|||
|
||||
from fastapi import APIRouter, Request, HTTPException, Query
|
||||
from fastapi import BackgroundTasks
|
||||
from ..services.orchestrator import _ensure_setup_ready, _run_theme_metadata_enrichment # type: ignore
|
||||
from ..services.orchestrator import _ensure_setup_ready, _run_theme_metadata_enrichment
|
||||
from fastapi.responses import JSONResponse, HTMLResponse
|
||||
from fastapi.templating import Jinja2Templates
|
||||
from ..services.theme_catalog_loader import (
|
||||
|
|
@@ -17,10 +17,10 @@ from ..services.theme_catalog_loader import (
     filter_slugs_fast,
     summaries_for_slugs,
 )
-from ..services.theme_preview import get_theme_preview  # type: ignore
-from ..services.theme_catalog_loader import catalog_metrics, prewarm_common_filters  # type: ignore
-from ..services.theme_preview import preview_metrics  # type: ignore
-from ..services import theme_preview as _theme_preview_mod  # type: ignore # for error counters
+from ..services.theme_preview import get_theme_preview
+from ..services.theme_catalog_loader import catalog_metrics, prewarm_common_filters
+from ..services.theme_preview import preview_metrics
+from ..services import theme_preview as _theme_preview_mod  # for error counters
 import os
 from fastapi import Body

@@ -36,7 +36,7 @@ router = APIRouter(prefix="/themes", tags=["themes"])  # /themes/status

 # Reuse the main app's template environment so nav globals stay consistent.
 try:  # circular-safe import: app defines templates before importing this router
-    from ..app import templates as _templates  # type: ignore
+    from ..app import templates as _templates
 except Exception:  # Fallback (tests/minimal contexts)
     _templates = Jinja2Templates(directory=str(Path(__file__).resolve().parent.parent / 'templates'))

@@ -131,7 +131,7 @@ async def theme_suggest(
     # Optional rate limit using app helper if available
     rl_result = None
     try:
-        from ..app import rate_limit_check  # type: ignore
+        from ..app import rate_limit_check
        rl_result = rate_limit_check(request, "suggest")
     except HTTPException as http_ex:  # propagate 429 with headers
         raise http_ex

@@ -231,7 +231,7 @@ async def theme_status():
     yaml_file_count = 0
     if yaml_catalog_exists:
         try:
-            yaml_file_count = len([p for p in CATALOG_DIR.iterdir() if p.suffix == ".yml"])  # type: ignore[arg-type]
+            yaml_file_count = len([p for p in CATALOG_DIR.iterdir() if p.suffix == ".yml"])
         except Exception:
             yaml_file_count = -1
     tagged_time = _load_tag_flag_time()

@@ -291,28 +291,6 @@ def _diag_enabled() -> bool:
     return (os.getenv("WEB_THEME_PICKER_DIAGNOSTICS") or "").strip().lower() in {"1", "true", "yes", "on"}


-@router.get("/picker", response_class=HTMLResponse)
-async def theme_picker_page(request: Request):
-    """Render the theme picker shell.
-
-    Dynamic data (list, detail) loads via fragment endpoints. We still inject
-    known archetype list for the filter select so it is populated on initial load.
-    """
-    archetypes: list[str] = []
-    try:
-        idx = load_index()
-        archetypes = sorted({t.deck_archetype for t in idx.catalog.themes if t.deck_archetype})  # type: ignore[arg-type]
-    except Exception:
-        archetypes = []
-    return _templates.TemplateResponse(
-        "themes/picker.html",
-        {
-            "request": request,
-            "archetypes": archetypes,
-            "theme_picker_diagnostics": _diag_enabled(),
-        },
-    )
-
 @router.get("/metrics")
 async def theme_metrics():
     if not _diag_enabled():

@@ -569,7 +547,7 @@ async def theme_yaml(theme_id: str):
         raise HTTPException(status_code=404, detail="yaml_not_found")
     # Reconstruct minimal YAML (we have dict already)
     import yaml as _yaml  # local import to keep top-level lean
-    text = _yaml.safe_dump(y, sort_keys=False)  # type: ignore
+    text = _yaml.safe_dump(y, sort_keys=False)
     headers = {"Content-Type": "text/plain; charset=utf-8"}
     return HTMLResponse(text, headers=headers)

@@ -653,7 +631,7 @@ async def api_theme_search(
     prefix: list[dict[str, Any]] = []
     substr: list[dict[str, Any]] = []
     seen: set[str] = set()
-    themes_iter = list(idx.catalog.themes)  # type: ignore[attr-defined]
+    themes_iter = list(idx.catalog.themes)
     # Phase 1 + 2: exact / prefix
     for t in themes_iter:
         name = t.theme

@@ -746,89 +724,9 @@ async def api_theme_preview(
     return JSONResponse({"ok": True, "preview": payload})


-@router.get("/fragment/preview/{theme_id}", response_class=HTMLResponse)
-async def theme_preview_fragment(
-    theme_id: str,
-    limit: int = Query(12, ge=1, le=30),
-    colors: str | None = None,
-    commander: str | None = None,
-    suppress_curated: bool = Query(False, description="If true, omit curated example cards/commanders from the sample area (used on detail page to avoid duplication)"),
-    minimal: bool = Query(False, description="Minimal inline variant (no header/controls/rationale – used in detail page collapsible preview)"),
-    request: Request = None,
-):
-    """Return HTML fragment for theme preview with caching headers.
-
-    Adds ETag and Last-Modified headers (no strong caching – enables conditional GET / 304).
-    ETag composed of catalog index etag + stable hash of preview payload (theme id + limit + commander).
-    """
-    try:
-        payload = get_theme_preview(theme_id, limit=limit, colors=colors, commander=commander)
-    except KeyError:
-        return HTMLResponse("<div class='error'>Theme not found.</div>", status_code=404)
-    # Load example commanders (authoritative list) from catalog detail for legality instead of inferring
-    example_commanders: list[str] = []
-    synergy_commanders: list[str] = []
-    try:
-        idx = load_index()
-        slug = slugify(theme_id)
-        entry = idx.slug_to_entry.get(slug)
-        if entry:
-            detail = project_detail(slug, entry, idx.slug_to_yaml, uncapped=False)
-            example_commanders = [c for c in (detail.get("example_commanders") or []) if isinstance(c, str)]
-            synergy_commanders_raw = [c for c in (detail.get("synergy_commanders") or []) if isinstance(c, str)]
-            # De-duplicate any overlap with example commanders while preserving order
-            seen = set(example_commanders)
-            for c in synergy_commanders_raw:
-                if c not in seen:
-                    synergy_commanders.append(c)
-                    seen.add(c)
-    except Exception:
-        example_commanders = []
-        synergy_commanders = []
-    # Build ETag (use catalog etag + hash of core identifying fields to reflect underlying data drift)
-    import hashlib
-    import json as _json
-    import time as _time
-    try:
-        idx = load_index()
-        catalog_tag = idx.etag
-    except Exception:
-        catalog_tag = "unknown"
-    hash_src = _json.dumps({
-        "theme": theme_id,
-        "limit": limit,
-        "commander": commander,
-        "sample": payload.get("sample", [])[:3],  # small slice for stability & speed
-        "v": 1,
-    }, sort_keys=True).encode("utf-8")
-    etag = "pv-" + hashlib.sha256(hash_src).hexdigest()[:20] + f"-{catalog_tag}"
-    # Conditional request support
-    if request is not None:
-        inm = request.headers.get("if-none-match")
-        if inm and inm == etag:
-            # 304 Not Modified – FastAPI HTMLResponse with empty body & headers
-            resp = HTMLResponse(status_code=304, content="")
-            resp.headers["ETag"] = etag
-            from email.utils import formatdate as _fmtdate
-            resp.headers["Last-Modified"] = _fmtdate(timeval=_time.time(), usegmt=True)
-            resp.headers["Cache-Control"] = "no-cache"
-            return resp
-    ctx = {
-        "request": request,
-        "preview": payload,
-        "example_commanders": example_commanders,
-        "synergy_commanders": synergy_commanders,
-        "theme_id": theme_id,
-        "etag": etag,
-        "suppress_curated": suppress_curated,
-        "minimal": minimal,
-    }
-    resp = _templates.TemplateResponse("themes/preview_fragment.html", ctx)
-    resp.headers["ETag"] = etag
-    from email.utils import formatdate as _fmtdate
-    resp.headers["Last-Modified"] = _fmtdate(timeval=_time.time(), usegmt=True)
-    resp.headers["Cache-Control"] = "no-cache"
-    return resp
-
 @router.get("/fragment/list", response_class=HTMLResponse)


 # --- Preview Export Endpoints (CSV / JSON) ---

@@ -202,7 +202,7 @@ def commander_hover_context(
         from .summary_utils import format_theme_label, format_theme_list
     except Exception:
         # Fallbacks in the unlikely event of circular import issues
-        def format_theme_label(value: Any) -> str:  # type: ignore[redef]
+        def format_theme_label(value: Any) -> str:
             text = str(value or "").strip().replace("_", " ")
             if not text:
                 return ""

@@ -214,10 +214,10 @@ def commander_hover_context(
                 parts.append(chunk[:1].upper() + chunk[1:].lower())
             return " ".join(parts)

-        def format_theme_list(values: Iterable[Any]) -> list[str]:  # type: ignore[redef]
+        def format_theme_list(values: Iterable[Any]) -> list[str]:
             seen: set[str] = set()
             result: list[str] = []
-            for raw in values or []:  # type: ignore[arg-type]
+            for raw in values or []:
                 label = format_theme_label(raw)
                 if not label or len(label) <= 1:
                     continue

@@ -310,13 +310,30 @@ def commander_hover_context(

     raw_color_identity = combined_info.get("color_identity") if combined_info else None
     commander_color_identity: list[str] = []
+
+    # If we have a combined commander (partner/background), use its color identity
     if isinstance(raw_color_identity, (list, tuple, set)):
         for item in raw_color_identity:
             token = str(item).strip().upper()
             if token:
                 commander_color_identity.append(token)
+
+    # M7: For non-partner commanders, also check summary.colors for color identity
+    # For regular commanders (no partner/background), look up from commander catalog first
+    if not commander_color_identity and not has_combined and commander_name:
+        try:
+            from .commander_catalog_loader import find_commander_record
+            record = find_commander_record(commander_name)
+            if record and hasattr(record, 'color_identity'):
+                raw_ci = record.color_identity
+                if isinstance(raw_ci, (list, tuple, set)):
+                    for item in raw_ci:
+                        token = str(item).strip().upper()
+                        if token:
+                            commander_color_identity.append(token)
+        except Exception:
+            pass
+
+    # Fallback: check summary.colors if we still don't have color identity
     if not commander_color_identity and not has_combined and isinstance(summary, dict):
         summary_colors = summary.get("colors")
         if isinstance(summary_colors, (list, tuple, set)):

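The token normalization repeated in both branches above distills to a small helper; a standalone sketch (the function name is illustrative):

def normalize_color_identity(raw) -> list[str]:
    # Accepts any iterable of color letters and returns upper-cased tokens, e.g. ['W', 'U'].
    out: list[str] = []
    if isinstance(raw, (list, tuple, set)):
        for item in raw:
            token = str(item).strip().upper()
            if token:
                out.append(token)
    return out

print(normalize_color_identity(("w", "u", "")))  # ['W', 'U']
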
@@ -403,7 +420,7 @@ def step5_ctx_from_result(
     else:
         entry = {}
         try:
-            entry.update(vars(item))  # type: ignore[arg-type]
+            entry.update(vars(item))
         except Exception:
             pass
         # Preserve common attributes when vars() empty

@@ -359,7 +359,7 @@ def _global_prune_disallowed_pool(b: DeckBuilder) -> None:
        drop_idx = tags_series.apply(lambda lst, nd=needles: _has_any(lst, nd))
        mask_keep = [mk and (not di) for mk, di in zip(mask_keep, drop_idx.tolist())]
    try:
-        import pandas as _pd # type: ignore
+        import pandas as _pd
        mask_keep = _pd.Series(mask_keep, index=work.index)
    except Exception:
        pass
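
The pd.Series wrapper matters because mask_keep is built positionally in plain Python, while boolean indexing on a DataFrame aligns by index; wrapping with index=work.index makes the two agree even when the frame's index is not 0..n-1. A tiny demonstration:

import pandas as pd

work = pd.DataFrame({"name": ["a", "b", "c"]}, index=[10, 20, 30])
mask_keep = [True, False, True]                 # positional mask
mask = pd.Series(mask_keep, index=work.index)   # align to the frame's index
print(work[mask]["name"].tolist())              # ['a', 'c']
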
@@ -480,7 +480,7 @@ def commander_candidates(query: str, limit: int = 10) -> List[Tuple[str, int, Li
    tmp = DeckBuilder()
    try:
        if hasattr(tmp, '_normalize_commander_query'):
-            query = tmp._normalize_commander_query(query) # type: ignore[attr-defined]
+            query = tmp._normalize_commander_query(query)
        else:
            # Light fallback: basic title case
            query = ' '.join([w[:1].upper() + w[1:].lower() if w else w for w in str(query).split(' ')])
@@ -653,7 +653,7 @@ def commander_select(name: str) -> Dict[str, Any]:
    if row.empty:
        try:
            if hasattr(tmp, '_normalize_commander_query'):
-                name2 = tmp._normalize_commander_query(name) # type: ignore[attr-defined]
+                name2 = tmp._normalize_commander_query(name)
            else:
                name2 = ' '.join([w[:1].upper() + w[1:].lower() if w else w for w in str(name).split(' ')])
            row = df[df["name"] == name2]
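
Both call sites share the same fallback: when the builder lacks _normalize_commander_query, a basic title-case pass stands in. Extracted as a standalone function (hypothetical name):

def normalize_query_fallback(query: str) -> str:
    # Title-case each space-separated word, preserving empty tokens so
    # consecutive spaces round-trip unchanged.
    return ' '.join(w[:1].upper() + w[1:].lower() if w else w for w in str(query).split(' '))

print(normalize_query_fallback("atraxa, praetors' voice"))  # Atraxa, Praetors' Voice
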
@@ -1288,8 +1288,8 @@ def _ensure_setup_ready(out, force: bool = False) -> None:
            pass
        # Bust theme-related in-memory caches so new catalog reflects immediately
        try:
-            from .theme_catalog_loader import bust_filter_cache # type: ignore
-            from .theme_preview import bust_preview_cache # type: ignore
+            from .theme_catalog_loader import bust_filter_cache
+            from .theme_preview import bust_preview_cache
            bust_filter_cache("catalog_refresh")
            bust_preview_cache("catalog_refresh")
            try:
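
bust_filter_cache and bust_preview_cache take a reason string, which suggests the reason annotates logs or metrics rather than affecting what gets dropped. A minimal sketch of that shape, assuming the real functions simply clear module-level caches:

_CACHE: dict[str, object] = {}

def bust_cache(reason: str) -> int:
    # Drop everything; the reason only annotates logs/metrics.
    count = len(_CACHE)
    _CACHE.clear()
    print(f"cache busted ({reason}): {count} entries dropped")
    return count
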
@@ -1327,7 +1327,7 @@ def _ensure_setup_ready(out, force: bool = False) -> None:

    try:
        # M4 (Parquet Migration): Check for processed Parquet file instead of CSV
-        from path_util import get_processed_cards_path # type: ignore
+        from path_util import get_processed_cards_path
        cards_path = get_processed_cards_path()
        flag_path = os.path.join('csv_files', '.tagging_complete.json')
        auto_setup_enabled = _is_truthy_env('WEB_AUTO_SETUP', '1')
@@ -1416,7 +1416,7 @@ def _ensure_setup_ready(out, force: bool = False) -> None:
            _write_status({"running": True, "phase": "setup", "message": "GitHub download failed, running local setup...", "percent": 0})

        try:
-            from file_setup.setup import initial_setup # type: ignore
+            from file_setup.setup import initial_setup
            # Always run initial_setup when forced or when cards are missing/stale
            initial_setup()
        except Exception as e:
@@ -1425,7 +1425,7 @@ def _ensure_setup_ready(out, force: bool = False) -> None:
            return
        # M4 (Parquet Migration): Use unified run_tagging with parallel support
        try:
-            from tagging import tagger as _tagger # type: ignore
+            from tagging import tagger as _tagger
            use_parallel = str(os.getenv('WEB_TAG_PARALLEL', '1')).strip().lower() in {"1","true","yes","on"}
            max_workers_env = os.getenv('WEB_TAG_WORKERS')
            try:
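
The WEB_TAG_PARALLEL parsing above accepts the usual truthy spellings. The same check appears elsewhere as _is_truthy_env; a standalone equivalent:

import os

def is_truthy_env(name: str, default: str = "0") -> bool:
    return str(os.getenv(name, default)).strip().lower() in {"1", "true", "yes", "on"}

os.environ["WEB_TAG_PARALLEL"] = "Yes"
print(is_truthy_env("WEB_TAG_PARALLEL", "1"))  # True
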
@@ -1466,7 +1466,7 @@ def _ensure_setup_ready(out, force: bool = False) -> None:
        try:
            _write_status({"running": True, "phase": "aggregating", "message": "Consolidating card data...", "percent": 90})
            out("Aggregating card CSVs into Parquet files...")
-            from file_setup.card_aggregator import CardAggregator # type: ignore
+            from file_setup.card_aggregator import CardAggregator
            aggregator = CardAggregator()

            # Aggregate all_cards.parquet
@@ -1474,7 +1474,7 @@ def _ensure_setup_ready(out, force: bool = False) -> None:
            out(f"Aggregated {stats['total_cards']} cards into all_cards.parquet ({stats['file_size_mb']} MB)")

            # Convert commander_cards.csv and background_cards.csv to Parquet
-            import pandas as pd # type: ignore
+            import pandas as pd

            # Convert commander_cards.csv
            commander_csv = 'csv_files/commander_cards.csv'
@@ -1524,8 +1524,8 @@ def _ensure_setup_ready(out, force: bool = False) -> None:
        # Generate / refresh theme catalog (JSON + per-theme YAML) BEFORE marking done so UI sees progress
        _refresh_theme_catalog(out, force=True, fast_path=False)
        try:
-            from .theme_catalog_loader import bust_filter_cache # type: ignore
-            from .theme_preview import bust_preview_cache # type: ignore
+            from .theme_catalog_loader import bust_filter_cache
+            from .theme_preview import bust_preview_cache
            bust_filter_cache("tagging_complete")
            bust_preview_cache("tagging_complete")
        except Exception:
@@ -1721,19 +1721,19 @@ def run_build(commander: str, tags: List[str], bracket: int, ideals: Dict[str, i
    # Owned/Prefer-owned integration (optional for headless runs)
    try:
        if use_owned_only:
-            b.use_owned_only = True # type: ignore[attr-defined]
+            b.use_owned_only = True
            # Prefer explicit owned_names list if provided; else let builder discover from files
            if owned_names:
                try:
-                    b.owned_card_names = set(str(n).strip() for n in owned_names if str(n).strip()) # type: ignore[attr-defined]
+                    b.owned_card_names = set(str(n).strip() for n in owned_names if str(n).strip())
                except Exception:
-                    b.owned_card_names = set() # type: ignore[attr-defined]
+                    b.owned_card_names = set()
        # Soft preference flag does not filter; only biases selection order
        if prefer_owned:
            try:
-                b.prefer_owned = True # type: ignore[attr-defined]
+                b.prefer_owned = True
                if owned_names and not getattr(b, 'owned_card_names', None):
-                    b.owned_card_names = set(str(n).strip() for n in owned_names if str(n).strip()) # type: ignore[attr-defined]
+                    b.owned_card_names = set(str(n).strip() for n in owned_names if str(n).strip())
            except Exception:
                pass
    except Exception:
@@ -1751,13 +1751,13 @@ def run_build(commander: str, tags: List[str], bracket: int, ideals: Dict[str, i
    # Thread combo preferences (if provided)
    try:
        if prefer_combos is not None:
-            b.prefer_combos = bool(prefer_combos) # type: ignore[attr-defined]
+            b.prefer_combos = bool(prefer_combos)
        if combo_target_count is not None:
-            b.combo_target_count = int(combo_target_count) # type: ignore[attr-defined]
+            b.combo_target_count = int(combo_target_count)
        if combo_balance:
            bal = str(combo_balance).strip().lower()
            if bal in ('early','late','mix'):
-                b.combo_balance = bal # type: ignore[attr-defined]
+                b.combo_balance = bal
    except Exception:
        pass

@@ -1934,7 +1934,7 @@ def run_build(commander: str, tags: List[str], bracket: int, ideals: Dict[str, i
        except Exception:
            pass
        if hasattr(b, 'export_decklist_csv'):
-            csv_path = b.export_decklist_csv() # type: ignore[attr-defined]
+            csv_path = b.export_decklist_csv()
    except Exception as e:
        out(f"CSV export failed: {e}")
    try:
@@ -1942,7 +1942,7 @@ def run_build(commander: str, tags: List[str], bracket: int, ideals: Dict[str, i
        # Try to mirror build_deck_full behavior by displaying the contents
        import os as _os
        base, _ext = _os.path.splitext(_os.path.basename(csv_path)) if csv_path else (f"deck_{b.timestamp}", "")
-        txt_path = b.export_decklist_text(filename=base + '.txt') # type: ignore[attr-defined]
+        txt_path = b.export_decklist_text(filename=base + '.txt')
        try:
            b._display_txt_contents(txt_path)
        except Exception:
@@ -1950,7 +1950,7 @@ def run_build(commander: str, tags: List[str], bracket: int, ideals: Dict[str, i
    # Compute bracket compliance and save JSON alongside exports
    try:
        if hasattr(b, 'compute_and_print_compliance'):
-            rep0 = b.compute_and_print_compliance(base_stem=base) # type: ignore[attr-defined]
+            rep0 = b.compute_and_print_compliance(base_stem=base)
            # Attach planning preview (no mutation) and only auto-enforce if explicitly enabled
            rep0 = _attach_enforcement_plan(b, rep0)
            try:
@@ -1959,7 +1959,7 @@ def run_build(commander: str, tags: List[str], bracket: int, ideals: Dict[str, i
            except Exception:
                _auto = False
            if _auto and isinstance(rep0, dict) and rep0.get('overall') == 'FAIL' and hasattr(b, 'enforce_and_reexport'):
-                b.enforce_and_reexport(base_stem=base, mode='auto') # type: ignore[attr-defined]
+                b.enforce_and_reexport(base_stem=base, mode='auto')
    except Exception:
        pass
    # Load compliance JSON for UI consumption
@@ -1981,7 +1981,7 @@ def run_build(commander: str, tags: List[str], bracket: int, ideals: Dict[str, i
    # Build structured summary for UI
    try:
        if hasattr(b, 'build_deck_summary'):
-            summary = b.build_deck_summary() # type: ignore[attr-defined]
+            summary = b.build_deck_summary()
    except Exception:
        summary = None
    # Write sidecar summary JSON next to CSV (if available)
@@ -1999,7 +1999,7 @@ def run_build(commander: str, tags: List[str], bracket: int, ideals: Dict[str, i
        "txt": txt_path,
    }
    try:
-        commander_meta = b.get_commander_export_metadata() # type: ignore[attr-defined]
+        commander_meta = b.get_commander_export_metadata()
    except Exception:
        commander_meta = {}
    names = commander_meta.get("commander_names") or []
@@ -2383,21 +2383,21 @@ def _apply_combined_commander_to_builder(builder: DeckBuilder, combined: Any) ->
    """Attach combined commander metadata to the builder."""

    try:
-        builder.combined_commander = combined # type: ignore[attr-defined]
+        builder.combined_commander = combined
    except Exception:
        pass
    try:
-        builder.partner_mode = getattr(combined, "partner_mode", None) # type: ignore[attr-defined]
+        builder.partner_mode = getattr(combined, "partner_mode", None)
    except Exception:
        pass
    try:
-        builder.secondary_commander = getattr(combined, "secondary_name", None) # type: ignore[attr-defined]
+        builder.secondary_commander = getattr(combined, "secondary_name", None)
    except Exception:
        pass
    try:
-        builder.combined_color_identity = getattr(combined, "color_identity", None) # type: ignore[attr-defined]
-        builder.combined_theme_tags = getattr(combined, "theme_tags", None) # type: ignore[attr-defined]
-        builder.partner_warnings = getattr(combined, "warnings", None) # type: ignore[attr-defined]
+        builder.combined_color_identity = getattr(combined, "color_identity", None)
+        builder.combined_theme_tags = getattr(combined, "theme_tags", None)
+        builder.partner_warnings = getattr(combined, "warnings", None)
    except Exception:
        pass
    commander_dict = getattr(builder, "commander_dict", None)
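
Most attributes above get their own try/except so one failing setattr cannot skip the rest (the last three share a block). The repeated pattern collapses into a helper like this (hypothetical; the codebase keeps the blocks inline):

from typing import Any

def attach_optional(obj: Any, **attrs: Any) -> None:
    # Best-effort attachment: one failing setattr must not abort the others.
    for name, value in attrs.items():
        try:
            setattr(obj, name, value)
        except Exception:
            pass
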
@@ -2583,17 +2583,17 @@ def start_build_ctx(
    # Owned-only / prefer-owned (if requested)
    try:
        if use_owned_only:
-            b.use_owned_only = True # type: ignore[attr-defined]
+            b.use_owned_only = True
            if owned_names:
                try:
-                    b.owned_card_names = set(str(n).strip() for n in owned_names if str(n).strip()) # type: ignore[attr-defined]
+                    b.owned_card_names = set(str(n).strip() for n in owned_names if str(n).strip())
                except Exception:
-                    b.owned_card_names = set() # type: ignore[attr-defined]
+                    b.owned_card_names = set()
        if prefer_owned:
            try:
-                b.prefer_owned = True # type: ignore[attr-defined]
+                b.prefer_owned = True
                if owned_names and not getattr(b, 'owned_card_names', None):
-                    b.owned_card_names = set(str(n).strip() for n in owned_names if str(n).strip()) # type: ignore[attr-defined]
+                    b.owned_card_names = set(str(n).strip() for n in owned_names if str(n).strip())
            except Exception:
                pass
    except Exception:
@@ -2646,14 +2646,14 @@ def start_build_ctx(
    # Thread combo config
    try:
        if combo_target_count is not None:
-            b.combo_target_count = int(combo_target_count) # type: ignore[attr-defined]
+            b.combo_target_count = int(combo_target_count)
    except Exception:
        pass
    try:
        if combo_balance:
            bal = str(combo_balance).strip().lower()
            if bal in ('early','late','mix'):
-                b.combo_balance = bal # type: ignore[attr-defined]
+                b.combo_balance = bal
    except Exception:
        pass
    # Stages
@@ -2735,23 +2735,23 @@ def run_stage(ctx: Dict[str, Any], rerun: bool = False, show_skipped: bool = Fal
            pass
    if not ctx.get("txt_path") and hasattr(b, 'export_decklist_text'):
        try:
-            ctx["csv_path"] = b.export_decklist_csv() # type: ignore[attr-defined]
+            ctx["csv_path"] = b.export_decklist_csv()
        except Exception as e:
            logs.append(f"CSV export failed: {e}")
    if not ctx.get("txt_path") and hasattr(b, 'export_decklist_text'):
        try:
            import os as _os
            base, _ext = _os.path.splitext(_os.path.basename(ctx.get("csv_path") or f"deck_{b.timestamp}.csv"))
-            ctx["txt_path"] = b.export_decklist_text(filename=base + '.txt') # type: ignore[attr-defined]
+            ctx["txt_path"] = b.export_decklist_text(filename=base + '.txt')
            # Export the run configuration JSON for manual builds
            try:
-                b.export_run_config_json(directory='config', filename=base + '.json') # type: ignore[attr-defined]
+                b.export_run_config_json(directory='config', filename=base + '.json')
            except Exception:
                pass
            # Compute bracket compliance and save JSON alongside exports
            try:
                if hasattr(b, 'compute_and_print_compliance'):
-                    rep0 = b.compute_and_print_compliance(base_stem=base) # type: ignore[attr-defined]
+                    rep0 = b.compute_and_print_compliance(base_stem=base)
                    rep0 = _attach_enforcement_plan(b, rep0)
                    try:
                        import os as __os
@@ -2759,7 +2759,7 @@ def run_stage(ctx: Dict[str, Any], rerun: bool = False, show_skipped: bool = Fal
                    except Exception:
                        _auto = False
                    if _auto and isinstance(rep0, dict) and rep0.get('overall') == 'FAIL' and hasattr(b, 'enforce_and_reexport'):
-                        b.enforce_and_reexport(base_stem=base, mode='auto') # type: ignore[attr-defined]
+                        b.enforce_and_reexport(base_stem=base, mode='auto')
            except Exception:
                pass
            # Load compliance JSON for UI consumption
@@ -2811,7 +2811,7 @@ def run_stage(ctx: Dict[str, Any], rerun: bool = False, show_skipped: bool = Fal
    summary = None
    try:
        if hasattr(b, 'build_deck_summary'):
-            summary = b.build_deck_summary() # type: ignore[attr-defined]
+            summary = b.build_deck_summary()
    except Exception:
        summary = None
    # Write sidecar summary JSON next to CSV (if available)
@@ -2830,7 +2830,7 @@ def run_stage(ctx: Dict[str, Any], rerun: bool = False, show_skipped: bool = Fal
        "txt": ctx.get("txt_path"),
    }
    try:
-        commander_meta = b.get_commander_export_metadata() # type: ignore[attr-defined]
+        commander_meta = b.get_commander_export_metadata()
    except Exception:
        commander_meta = {}
    names = commander_meta.get("commander_names") or []
@@ -2890,12 +2890,12 @@ def run_stage(ctx: Dict[str, Any], rerun: bool = False, show_skipped: bool = Fal
    comp_now = None
    try:
        if hasattr(b, 'compute_and_print_compliance'):
-            comp_now = b.compute_and_print_compliance(base_stem=None) # type: ignore[attr-defined]
+            comp_now = b.compute_and_print_compliance(base_stem=None)
    except Exception:
        comp_now = None
    try:
        if comp_now:
-            comp_now = _attach_enforcement_plan(b, comp_now) # type: ignore[attr-defined]
+            comp_now = _attach_enforcement_plan(b, comp_now)
    except Exception:
        pass
    # If still FAIL, return the saved result without advancing or rerunning
@@ -3407,7 +3407,7 @@ def run_stage(ctx: Dict[str, Any], rerun: bool = False, show_skipped: bool = Fal
    comp = None
    try:
        if hasattr(b, 'compute_and_print_compliance'):
-            comp = b.compute_and_print_compliance(base_stem=None) # type: ignore[attr-defined]
+            comp = b.compute_and_print_compliance(base_stem=None)
    except Exception:
        comp = None
    try:
@@ -3508,7 +3508,7 @@ def run_stage(ctx: Dict[str, Any], rerun: bool = False, show_skipped: bool = Fal
    comp = None
    try:
        if hasattr(b, 'compute_and_print_compliance'):
-            comp = b.compute_and_print_compliance(base_stem=None) # type: ignore[attr-defined]
+            comp = b.compute_and_print_compliance(base_stem=None)
    except Exception:
        comp = None
    try:
@@ -3575,7 +3575,7 @@ def run_stage(ctx: Dict[str, Any], rerun: bool = False, show_skipped: bool = Fal
    comp = None
    try:
        if hasattr(b, 'compute_and_print_compliance'):
-            comp = b.compute_and_print_compliance(base_stem=None) # type: ignore[attr-defined]
+            comp = b.compute_and_print_compliance(base_stem=None)
    except Exception:
        comp = None
    try:
@@ -3617,23 +3617,23 @@ def run_stage(ctx: Dict[str, Any], rerun: bool = False, show_skipped: bool = Fal
            pass
    if not ctx.get("csv_path") and hasattr(b, 'export_decklist_csv'):
        try:
-            ctx["csv_path"] = b.export_decklist_csv() # type: ignore[attr-defined]
+            ctx["csv_path"] = b.export_decklist_csv()
        except Exception as e:
            logs.append(f"CSV export failed: {e}")
    if not ctx.get("txt_path") and hasattr(b, 'export_decklist_text'):
        try:
            import os as _os
            base, _ext = _os.path.splitext(_os.path.basename(ctx.get("csv_path") or f"deck_{b.timestamp}.csv"))
-            ctx["txt_path"] = b.export_decklist_text(filename=base + '.txt') # type: ignore[attr-defined]
+            ctx["txt_path"] = b.export_decklist_text(filename=base + '.txt')
            # Export the run configuration JSON for manual builds
            try:
-                b.export_run_config_json(directory='config', filename=base + '.json') # type: ignore[attr-defined]
+                b.export_run_config_json(directory='config', filename=base + '.json')
            except Exception:
                pass
            # Compute bracket compliance and save JSON alongside exports
            try:
                if hasattr(b, 'compute_and_print_compliance'):
-                    rep0 = b.compute_and_print_compliance(base_stem=base) # type: ignore[attr-defined]
+                    rep0 = b.compute_and_print_compliance(base_stem=base)
                    rep0 = _attach_enforcement_plan(b, rep0)
                    try:
                        import os as __os
@@ -3641,7 +3641,7 @@ def run_stage(ctx: Dict[str, Any], rerun: bool = False, show_skipped: bool = Fal
                    except Exception:
                        _auto = False
                    if _auto and isinstance(rep0, dict) and rep0.get('overall') == 'FAIL' and hasattr(b, 'enforce_and_reexport'):
-                        b.enforce_and_reexport(base_stem=base, mode='auto') # type: ignore[attr-defined]
+                        b.enforce_and_reexport(base_stem=base, mode='auto')
            except Exception:
                pass
            # Load compliance JSON for UI consumption
@@ -3662,7 +3662,7 @@ def run_stage(ctx: Dict[str, Any], rerun: bool = False, show_skipped: bool = Fal
    summary = None
    try:
        if hasattr(b, 'build_deck_summary'):
-            summary = b.build_deck_summary() # type: ignore[attr-defined]
+            summary = b.build_deck_summary()
    except Exception:
        summary = None
    # Write sidecar summary JSON next to CSV (if available)
@@ -3681,7 +3681,7 @@ def run_stage(ctx: Dict[str, Any], rerun: bool = False, show_skipped: bool = Fal
        "txt": ctx.get("txt_path"),
    }
    try:
-        commander_meta = b.get_commander_export_metadata() # type: ignore[attr-defined]
+        commander_meta = b.get_commander_export_metadata()
    except Exception:
        commander_meta = {}
    names = commander_meta.get("commander_names") or []

@@ -362,7 +362,7 @@ def load_dataset(*, force: bool = False, refresh: bool = False) -> Optional[Part
    if allow_auto_refresh:
        _DATASET_REFRESH_ATTEMPTED = True
        try:
-            from .orchestrator import _maybe_refresh_partner_synergy # type: ignore
+            from .orchestrator import _maybe_refresh_partner_synergy

            _maybe_refresh_partner_synergy(None, force=True)
        except Exception as refresh_exc: # pragma: no cover - best-effort

@@ -21,7 +21,7 @@ import json
import threading
import math

-from .preview_metrics import record_eviction # type: ignore
+from .preview_metrics import record_eviction

# Phase 2 extraction: adaptive TTL band policy moved into preview_policy
from .preview_policy import (

@@ -30,7 +30,7 @@ from .preview_policy import (
    DEFAULT_TTL_MIN as _POLICY_TTL_MIN,
    DEFAULT_TTL_MAX as _POLICY_TTL_MAX,
)
-from .preview_cache_backend import redis_store # type: ignore
+from .preview_cache_backend import redis_store

TTL_SECONDS = 600
# Backward-compat variable names retained (tests may reference) mapping to policy constants
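
TTL_SECONDS = 600 is the fixed baseline; the adaptive TTL band policy itself lives in preview_policy. For readers new to the pattern, a minimal fixed-TTL cache with lazy eviction on read (a sketch of the baseline idea only, not the project's adaptive implementation):

import time
from typing import Optional

TTL_SECONDS = 600
_store: dict[str, tuple[float, str]] = {}

def put(key: str, value: str) -> None:
    _store[key] = (time.time(), value)

def get(key: str) -> Optional[str]:
    entry = _store.get(key)
    if entry is None:
        return None
    ts, value = entry
    if time.time() - ts > TTL_SECONDS:
        del _store[key]  # expired: evict lazily on read
        return None
    return value
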
Some files were not shown because too many files have changed in this diff.