fix(lint): improved type checking and code maintainability

This commit is contained in:
matt 2025-10-31 10:11:00 -07:00
parent 83fe527979
commit 40023e93b8
62 changed files with 187 additions and 197 deletions

View file

@@ -9,9 +9,9 @@ This format follows Keep a Changelog principles and aims for Semantic Versioning
## [Unreleased] ## [Unreleased]
### Added ### Added
- **Code Quality Improvements**: Enhanced type checking and code quality standards - **Code Quality Tools**: Enhanced development tooling for maintainability
- Configured gradual strict mode for Python type checking - Automated utilities for code cleanup
- Created automated utilities for maintaining clean codebase - Improved type checking configuration
- **Card Image Caching**: Optional local image cache for faster card display - **Card Image Caching**: Optional local image cache for faster card display
- Downloads card images from Scryfall bulk data (respects API guidelines) - Downloads card images from Scryfall bulk data (respects API guidelines)
- Graceful fallback to Scryfall API for uncached images - Graceful fallback to Scryfall API for uncached images
@@ -76,22 +76,19 @@ This format follows Keep a Changelog principles and aims for Semantic Versioning
### Changed ### Changed
- Migrated 5 templates to new component system (home, 404, 500, setup, commanders) - Migrated 5 templates to new component system (home, 404, 500, setup, commanders)
- **Type Checking Configuration**: Adjusted mypy settings for better developer experience - **Type Checking Configuration**: Improved Python code quality tooling
- Enabled gradual strict mode for incremental type safety improvements - Configured type checker for better error detection
- Configured per-module strict checks for new code - Optimized linting rules for development workflow
### Fixed ### Fixed
- **Code Quality**: Resolved numerous type checking warnings and improved code maintainability - **Code Quality**: Resolved type checking warnings and improved code maintainability
- Fixed critical type annotation bugs - Fixed type annotation inconsistencies
- Removed outdated type ignore comments - Cleaned up redundant code quality suppressions
- Corrected dictionary type definitions - Corrected configuration conflicts
### Removed ### Removed
_None_ _None_
### Fixed
_None_
### Performance ### Performance
- Hot reload for CSS/template changes (no Docker rebuild needed) - Hot reload for CSS/template changes (no Docker rebuild needed)
- Optional image caching reduces Scryfall API calls - Optional image caching reduces Scryfall API calls

View file

@@ -3,12 +3,12 @@
## [Unreleased] ## [Unreleased]
### Summary ### Summary
Web UI improvements with Tailwind CSS migration, TypeScript conversion, component library, enhanced code quality standards, and optional card image caching for faster performance and better maintainability. Web UI improvements with Tailwind CSS migration, TypeScript conversion, component library, enhanced code quality tools, and optional card image caching for faster performance and better maintainability.
### Added ### Added
- **Code Quality Improvements**: Enhanced type checking and code quality standards - **Code Quality Tools**: Enhanced development tooling for maintainability
- Configured gradual strict mode for Python type checking - Automated utilities for code cleanup
- Created automated utilities for maintaining clean codebase - Improved type checking configuration
- **Card Image Caching**: Optional local image cache for faster card display - **Card Image Caching**: Optional local image cache for faster card display
- Downloads card images from Scryfall bulk data (respects API guidelines) - Downloads card images from Scryfall bulk data (respects API guidelines)
- Graceful fallback to Scryfall API for uncached images - Graceful fallback to Scryfall API for uncached images
@@ -51,9 +51,9 @@ Web UI improvements with Tailwind CSS migration, TypeScript conversion, componen
- Hot reload enabled for templates and static files - Hot reload enabled for templates and static files
- Volume mounts for rapid iteration without rebuilds - Volume mounts for rapid iteration without rebuilds
- **Template Modernization**: Migrated templates to use component system - **Template Modernization**: Migrated templates to use component system
- **Type Checking Configuration**: Adjusted mypy settings for better developer experience - **Type Checking Configuration**: Improved Python code quality tooling
- Enabled gradual strict mode for incremental type safety improvements - Configured type checker for better error detection
- Configured per-module strict checks for new code - Optimized linting rules for development workflow
- **Intelligent Synergy Builder**: Analyze multiple builds and create optimized "best-of" deck - **Intelligent Synergy Builder**: Analyze multiple builds and create optimized "best-of" deck
- Scores cards by frequency (50%), EDHREC rank (25%), and theme tags (25%) - Scores cards by frequency (50%), EDHREC rank (25%), and theme tags (25%)
- 10% bonus for cards appearing in 80%+ of builds - 10% bonus for cards appearing in 80%+ of builds
@@ -78,10 +78,10 @@ Web UI improvements with Tailwind CSS migration, TypeScript conversion, componen
_None_ _None_
### Fixed ### Fixed
- **Code Quality**: Resolved numerous type checking warnings and improved code maintainability - **Code Quality**: Resolved type checking warnings and improved code maintainability
- Fixed critical type annotation bugs - Fixed type annotation inconsistencies
- Removed outdated type ignore comments - Cleaned up redundant code quality suppressions
- Corrected dictionary type definitions - Corrected configuration conflicts
### Performance ### Performance
- Hot reload for CSS/template changes (no Docker rebuild needed) - Hot reload for CSS/template changes (no Docker rebuild needed)

View file

@@ -88,12 +88,12 @@ def _candidate_pool_for_role(builder, role: str) -> List[Tuple[str, dict]]:
# Sort by edhrecRank then manaValue # Sort by edhrecRank then manaValue
try: try:
from . import builder_utils as bu from . import builder_utils as bu
sorted_df = bu.sort_by_priority(pool, ["edhrecRank", "manaValue"]) # type: ignore[attr-defined] sorted_df = bu.sort_by_priority(pool, ["edhrecRank", "manaValue"])
# Prefer-owned bias # Prefer-owned bias
if getattr(builder, "prefer_owned", False): if getattr(builder, "prefer_owned", False):
owned = getattr(builder, "owned_card_names", None) owned = getattr(builder, "owned_card_names", None)
if owned: if owned:
sorted_df = bu.prefer_owned_first(sorted_df, {str(n).lower() for n in owned}) # type: ignore[attr-defined] sorted_df = bu.prefer_owned_first(sorted_df, {str(n).lower() for n in owned})
except Exception: except Exception:
sorted_df = pool sorted_df = pool
@@ -363,7 +363,7 @@ def enforce_bracket_compliance(builder, mode: str = "prompt") -> Dict:
break break
# Rank candidates: break the most combos first; break ties by worst desirability # Rank candidates: break the most combos first; break ties by worst desirability
cand_names = list(freq.keys()) cand_names = list(freq.keys())
cand_names.sort(key=lambda nm: (-int(freq.get(nm, 0)), _score(nm)), reverse=False) # type: ignore[arg-type] cand_names.sort(key=lambda nm: (-int(freq.get(nm, 0)), _score(nm)), reverse=False)
removed_any = False removed_any = False
for nm in cand_names: for nm in cand_names:
if nm in blocked: if nm in blocked:

View file

@@ -17,7 +17,7 @@ from logging_util import get_logger
logger = get_logger(__name__) logger = get_logger(__name__)
try: # Optional pandas import for type checking without heavy dependency at runtime. try: # Optional pandas import for type checking without heavy dependency at runtime.
import pandas as _pd # type: ignore import pandas as _pd
except Exception: # pragma: no cover - tests provide DataFrame-like objects. except Exception: # pragma: no cover - tests provide DataFrame-like objects.
_pd = None # type: ignore _pd = None # type: ignore
@@ -267,7 +267,7 @@ def _find_commander_row(df: Any, name: str | None):
if not target: if not target:
return None return None
if _pd is not None and isinstance(df, _pd.DataFrame): # type: ignore if _pd is not None and isinstance(df, _pd.DataFrame):
columns = [col for col in ("name", "faceName") if col in df.columns] columns = [col for col in ("name", "faceName") if col in df.columns]
for col in columns: for col in columns:
series = df[col].astype(str).str.casefold() series = df[col].astype(str).str.casefold()

View file

@@ -885,7 +885,7 @@ def _filter_multi(df: pd.DataFrame, primary: Optional[str], secondary: Optional[
if index_map is None: if index_map is None:
_ensure_theme_tag_index(current_df) _ensure_theme_tag_index(current_df)
index_map = current_df.attrs.get("_ltag_index") or {} index_map = current_df.attrs.get("_ltag_index") or {}
return index_map # type: ignore[return-value] return index_map
index_map_all = _get_index_map(df) index_map_all = _get_index_map(df)
@@ -1047,7 +1047,7 @@ def _check_constraints(candidate_count: int, constraints: Optional[Dict[str, Any
if not constraints: if not constraints:
return return
try: try:
req_min = constraints.get("require_min_candidates") # type: ignore[attr-defined] req_min = constraints.get("require_min_candidates")
except Exception: except Exception:
req_min = None req_min = None
if req_min is None: if req_min is None:
@@ -1436,7 +1436,7 @@ def build_random_full_deck(
primary_choice_idx, secondary_choice_idx, tertiary_choice_idx = _resolve_theme_choices_for_headless(base.commander, base) primary_choice_idx, secondary_choice_idx, tertiary_choice_idx = _resolve_theme_choices_for_headless(base.commander, base)
try: try:
from headless_runner import run as _run # type: ignore from headless_runner import run as _run
except Exception as e: except Exception as e:
return RandomFullBuildResult( return RandomFullBuildResult(
seed=base.seed, seed=base.seed,
@@ -1482,7 +1482,7 @@ def build_random_full_deck(
summary: Dict[str, Any] | None = None summary: Dict[str, Any] | None = None
try: try:
if hasattr(builder, 'build_deck_summary'): if hasattr(builder, 'build_deck_summary'):
summary = builder.build_deck_summary() # type: ignore[attr-defined] summary = builder.build_deck_summary()
except Exception: except Exception:
summary = None summary = None
@@ -1559,7 +1559,7 @@ def build_random_full_deck(
if isinstance(custom_base, str) and custom_base.strip(): if isinstance(custom_base, str) and custom_base.strip():
meta_payload["name"] = custom_base.strip() meta_payload["name"] = custom_base.strip()
try: try:
commander_meta = builder.get_commander_export_metadata() # type: ignore[attr-defined] commander_meta = builder.get_commander_export_metadata()
except Exception: except Exception:
commander_meta = {} commander_meta = {}
names = commander_meta.get("commander_names") or [] names = commander_meta.get("commander_names") or []
@@ -1589,8 +1589,8 @@ def build_random_full_deck(
try: try:
import os as _os import os as _os
import json as _json import json as _json
csv_path = getattr(builder, 'last_csv_path', None) # type: ignore[attr-defined] csv_path = getattr(builder, 'last_csv_path', None)
txt_path = getattr(builder, 'last_txt_path', None) # type: ignore[attr-defined] txt_path = getattr(builder, 'last_txt_path', None)
if csv_path and isinstance(csv_path, str): if csv_path and isinstance(csv_path, str):
base_path, _ = _os.path.splitext(csv_path) base_path, _ = _os.path.splitext(csv_path)
# If txt missing but expected, look for sibling # If txt missing but expected, look for sibling
@@ -1608,7 +1608,7 @@ def build_random_full_deck(
# Compute compliance if not already saved # Compute compliance if not already saved
try: try:
if hasattr(builder, 'compute_and_print_compliance'): if hasattr(builder, 'compute_and_print_compliance'):
compliance = builder.compute_and_print_compliance(base_stem=_os.path.basename(base_path)) # type: ignore[attr-defined] compliance = builder.compute_and_print_compliance(base_stem=_os.path.basename(base_path))
except Exception: except Exception:
compliance = None compliance = None
# Write summary sidecar if missing # Write summary sidecar if missing
@@ -1646,7 +1646,7 @@ def build_random_full_deck(
csv_path = existing_base csv_path = existing_base
base_path, _ = _os.path.splitext(csv_path) base_path, _ = _os.path.splitext(csv_path)
else: else:
tmp_csv = builder.export_decklist_csv() # type: ignore[attr-defined] tmp_csv = builder.export_decklist_csv()
stem_base, ext = _os.path.splitext(tmp_csv) stem_base, ext = _os.path.splitext(tmp_csv)
if stem_base.endswith('_1'): if stem_base.endswith('_1'):
original = stem_base[:-2] + ext original = stem_base[:-2] + ext
@@ -1662,13 +1662,13 @@ def build_random_full_deck(
if _os.path.isfile(target_txt): if _os.path.isfile(target_txt):
txt_path = target_txt txt_path = target_txt
else: else:
tmp_txt = builder.export_decklist_text(filename=_os.path.basename(base_path) + '.txt') # type: ignore[attr-defined] tmp_txt = builder.export_decklist_text(filename=_os.path.basename(base_path) + '.txt')
if tmp_txt.endswith('_1.txt') and _os.path.isfile(target_txt): if tmp_txt.endswith('_1.txt') and _os.path.isfile(target_txt):
txt_path = target_txt txt_path = target_txt
else: else:
txt_path = tmp_txt txt_path = tmp_txt
if hasattr(builder, 'compute_and_print_compliance'): if hasattr(builder, 'compute_and_print_compliance'):
compliance = builder.compute_and_print_compliance(base_stem=_os.path.basename(base_path)) # type: ignore[attr-defined] compliance = builder.compute_and_print_compliance(base_stem=_os.path.basename(base_path))
if summary: if summary:
sidecar = base_path + '.summary.json' sidecar = base_path + '.summary.json'
if not _os.path.isfile(sidecar): if not _os.path.isfile(sidecar):

View file

@@ -183,7 +183,7 @@ def _iter_json_themes(payload: object) -> Iterable[ThemeCatalogEntry]:
try: try:
from type_definitions_theme_catalog import ThemeCatalog # pragma: no cover - primary import path from type_definitions_theme_catalog import ThemeCatalog # pragma: no cover - primary import path
except ImportError: # pragma: no cover - fallback when running as package except ImportError: # pragma: no cover - fallback when running as package
from code.type_definitions_theme_catalog import ThemeCatalog # type: ignore from code.type_definitions_theme_catalog import ThemeCatalog
try: try:
catalog = ThemeCatalog.model_validate(payload) catalog = ThemeCatalog.model_validate(payload)

View file

@@ -40,7 +40,7 @@ from typing import List, Dict, Any
# Third-party imports (optional) # Third-party imports (optional)
try: try:
import inquirer # type: ignore import inquirer
except Exception: except Exception:
inquirer = None # Fallback to simple input-based menu when unavailable inquirer = None # Fallback to simple input-based menu when unavailable
import pandas as pd import pandas as pd

View file

@@ -40,7 +40,7 @@ from typing import List, Dict, Any
# Third-party imports (optional) # Third-party imports (optional)
try: try:
import inquirer # type: ignore import inquirer
except Exception: except Exception:
inquirer = None # Fallback to simple input-based menu when unavailable inquirer = None # Fallback to simple input-based menu when unavailable
import pandas as pd import pandas as pd

View file

@@ -139,7 +139,7 @@ def _validate_commander_available(command_name: str) -> None:
return return
try: try:
from commander_exclusions import lookup_commander_detail as _lookup_commander_detail # type: ignore[import-not-found] from commander_exclusions import lookup_commander_detail as _lookup_commander_detail
except ImportError: # pragma: no cover except ImportError: # pragma: no cover
_lookup_commander_detail = None _lookup_commander_detail = None
@@ -281,12 +281,12 @@ def run(
# Optional deterministic seed for Random Modes (does not affect core when unset) # Optional deterministic seed for Random Modes (does not affect core when unset)
try: try:
if seed is not None: if seed is not None:
builder.set_seed(seed) # type: ignore[attr-defined] builder.set_seed(seed)
except Exception: except Exception:
pass pass
# Mark this run as headless so builder can adjust exports and logging # Mark this run as headless so builder can adjust exports and logging
try: try:
builder.headless = True # type: ignore[attr-defined] builder.headless = True
except Exception: except Exception:
pass pass
@@ -294,9 +294,9 @@ def run(
secondary_clean = (secondary_commander or "").strip() secondary_clean = (secondary_commander or "").strip()
background_clean = (background or "").strip() background_clean = (background or "").strip()
try: try:
builder.partner_feature_enabled = partner_feature_enabled # type: ignore[attr-defined] builder.partner_feature_enabled = partner_feature_enabled
builder.requested_secondary_commander = secondary_clean or None # type: ignore[attr-defined] builder.requested_secondary_commander = secondary_clean or None
builder.requested_background = background_clean or None # type: ignore[attr-defined] builder.requested_background = background_clean or None
except Exception: except Exception:
pass pass
@@ -313,11 +313,11 @@ def run(
# Configure include/exclude settings (M1: Config + Validation + Persistence) # Configure include/exclude settings (M1: Config + Validation + Persistence)
try: try:
builder.include_cards = list(include_cards or []) # type: ignore[attr-defined] builder.include_cards = list(include_cards or [])
builder.exclude_cards = list(exclude_cards or []) # type: ignore[attr-defined] builder.exclude_cards = list(exclude_cards or [])
builder.enforcement_mode = enforcement_mode # type: ignore[attr-defined] builder.enforcement_mode = enforcement_mode
builder.allow_illegal = allow_illegal # type: ignore[attr-defined] builder.allow_illegal = allow_illegal
builder.fuzzy_matching = fuzzy_matching # type: ignore[attr-defined] builder.fuzzy_matching = fuzzy_matching
except Exception: except Exception:
pass pass
@@ -336,16 +336,16 @@ def run(
) )
try: try:
builder.theme_match_mode = theme_resolution.mode # type: ignore[attr-defined] builder.theme_match_mode = theme_resolution.mode
builder.theme_catalog_version = theme_resolution.catalog_version # type: ignore[attr-defined] builder.theme_catalog_version = theme_resolution.catalog_version
builder.user_theme_requested = list(theme_resolution.requested) # type: ignore[attr-defined] builder.user_theme_requested = list(theme_resolution.requested)
builder.user_theme_resolved = list(theme_resolution.resolved) # type: ignore[attr-defined] builder.user_theme_resolved = list(theme_resolution.resolved)
builder.user_theme_matches = list(theme_resolution.matches) # type: ignore[attr-defined] builder.user_theme_matches = list(theme_resolution.matches)
builder.user_theme_unresolved = list(theme_resolution.unresolved) # type: ignore[attr-defined] builder.user_theme_unresolved = list(theme_resolution.unresolved)
builder.user_theme_fuzzy_corrections = dict(theme_resolution.fuzzy_corrections) # type: ignore[attr-defined] builder.user_theme_fuzzy_corrections = dict(theme_resolution.fuzzy_corrections)
builder.user_theme_resolution = theme_resolution # type: ignore[attr-defined] builder.user_theme_resolution = theme_resolution
if user_theme_weight is not None: if user_theme_weight is not None:
builder.user_theme_weight = float(user_theme_weight) # type: ignore[attr-defined] builder.user_theme_weight = float(user_theme_weight)
except Exception: except Exception:
pass pass
@@ -356,7 +356,7 @@ def run(
ic: Dict[str, int] = {} ic: Dict[str, int] = {}
for k, v in ideal_counts.items(): for k, v in ideal_counts.items():
try: try:
iv = int(v) if v is not None else None # type: ignore iv = int(v) if v is not None else None
except Exception: except Exception:
continue continue
if iv is None: if iv is None:
@@ -365,7 +365,7 @@ def run(
if k in {"ramp","lands","basic_lands","creatures","removal","wipes","card_advantage","protection"}: if k in {"ramp","lands","basic_lands","creatures","removal","wipes","card_advantage","protection"}:
ic[k] = iv ic[k] = iv
if ic: if ic:
builder.ideal_counts.update(ic) # type: ignore[attr-defined] builder.ideal_counts.update(ic)
except Exception: except Exception:
pass pass
builder.run_initial_setup() builder.run_initial_setup()
@@ -518,24 +518,24 @@ def _apply_combined_commander_to_builder(builder: DeckBuilder, combined_commande
"""Attach combined commander metadata to the builder for downstream use.""" """Attach combined commander metadata to the builder for downstream use."""
try: try:
builder.combined_commander = combined_commander # type: ignore[attr-defined] builder.combined_commander = combined_commander
except Exception: except Exception:
pass pass
try: try:
builder.partner_mode = combined_commander.partner_mode # type: ignore[attr-defined] builder.partner_mode = combined_commander.partner_mode
except Exception: except Exception:
pass pass
try: try:
builder.secondary_commander = combined_commander.secondary_name # type: ignore[attr-defined] builder.secondary_commander = combined_commander.secondary_name
except Exception: except Exception:
pass pass
try: try:
builder.combined_color_identity = combined_commander.color_identity # type: ignore[attr-defined] builder.combined_color_identity = combined_commander.color_identity
builder.combined_theme_tags = combined_commander.theme_tags # type: ignore[attr-defined] builder.combined_theme_tags = combined_commander.theme_tags
builder.partner_warnings = combined_commander.warnings # type: ignore[attr-defined] builder.partner_warnings = combined_commander.warnings
except Exception: except Exception:
pass pass
@@ -557,7 +557,7 @@ def _export_outputs(builder: DeckBuilder) -> None:
# Persist for downstream reuse (e.g., random_entrypoint / reroll flows) so they don't re-export # Persist for downstream reuse (e.g., random_entrypoint / reroll flows) so they don't re-export
if csv_path: if csv_path:
try: try:
builder.last_csv_path = csv_path # type: ignore[attr-defined] builder.last_csv_path = csv_path
except Exception: except Exception:
pass pass
except Exception: except Exception:
@@ -572,7 +572,7 @@ def _export_outputs(builder: DeckBuilder) -> None:
finally: finally:
if txt_generated: if txt_generated:
try: try:
builder.last_txt_path = txt_generated # type: ignore[attr-defined] builder.last_txt_path = txt_generated
except Exception: except Exception:
pass pass
else: else:
@@ -582,7 +582,7 @@ def _export_outputs(builder: DeckBuilder) -> None:
finally: finally:
if txt_generated: if txt_generated:
try: try:
builder.last_txt_path = txt_generated # type: ignore[attr-defined] builder.last_txt_path = txt_generated
except Exception: except Exception:
pass pass
except Exception: except Exception:
@@ -1196,7 +1196,7 @@ def _run_random_mode(config: RandomRunConfig) -> int:
RandomConstraintsImpossibleError, RandomConstraintsImpossibleError,
RandomThemeNoMatchError, RandomThemeNoMatchError,
build_random_full_deck, build_random_full_deck,
) # type: ignore )
except Exception as exc: except Exception as exc:
print(f"Random mode unavailable: {exc}") print(f"Random mode unavailable: {exc}")
return 1 return 1

View file

@@ -36,7 +36,7 @@ except Exception: # pragma: no cover
try: try:
# Support running as `python code/scripts/build_theme_catalog.py` when 'code' already on path # Support running as `python code/scripts/build_theme_catalog.py` when 'code' already on path
from scripts.extract_themes import ( # type: ignore from scripts.extract_themes import (
BASE_COLORS, BASE_COLORS,
collect_theme_tags_from_constants, collect_theme_tags_from_constants,
collect_theme_tags_from_tagger_source, collect_theme_tags_from_tagger_source,
@@ -51,7 +51,7 @@ try:
) )
except ModuleNotFoundError: except ModuleNotFoundError:
# Fallback: direct relative import when running within scripts package context # Fallback: direct relative import when running within scripts package context
from extract_themes import ( # type: ignore from extract_themes import (
BASE_COLORS, BASE_COLORS,
collect_theme_tags_from_constants, collect_theme_tags_from_constants,
collect_theme_tags_from_tagger_source, collect_theme_tags_from_tagger_source,
@@ -66,7 +66,7 @@ except ModuleNotFoundError:
) )
try: try:
from scripts.export_themes_to_yaml import slugify as slugify_theme # type: ignore from scripts.export_themes_to_yaml import slugify as slugify_theme
except Exception: except Exception:
_SLUG_RE = re.compile(r'[^a-z0-9-]') _SLUG_RE = re.compile(r'[^a-z0-9-]')
@@ -951,7 +951,7 @@ def main(): # pragma: no cover
if args.schema: if args.schema:
# Lazy import to avoid circular dependency: replicate minimal schema inline from models file if present # Lazy import to avoid circular dependency: replicate minimal schema inline from models file if present
try: try:
from type_definitions_theme_catalog import ThemeCatalog # type: ignore from type_definitions_theme_catalog import ThemeCatalog
import json as _json import json as _json
print(_json.dumps(ThemeCatalog.model_json_schema(), indent=2)) print(_json.dumps(ThemeCatalog.model_json_schema(), indent=2))
return return
@@ -990,8 +990,8 @@ def main(): # pragma: no cover
# Safeguard: if catalog dir missing, attempt to auto-export Phase A YAML first # Safeguard: if catalog dir missing, attempt to auto-export Phase A YAML first
if not CATALOG_DIR.exists(): # pragma: no cover (environmental) if not CATALOG_DIR.exists(): # pragma: no cover (environmental)
try: try:
from scripts.export_themes_to_yaml import main as export_main # type: ignore from scripts.export_themes_to_yaml import main as export_main
export_main(['--force']) # type: ignore[arg-type] export_main(['--force'])
except Exception as _e: except Exception as _e:
print(f"[build_theme_catalog] WARNING: catalog dir missing and auto export failed: {_e}", file=sys.stderr) print(f"[build_theme_catalog] WARNING: catalog dir missing and auto export failed: {_e}", file=sys.stderr)
if yaml is None: if yaml is None:
@@ -1013,7 +1013,7 @@ def main(): # pragma: no cover
meta_block = raw.get('metadata_info') if isinstance(raw.get('metadata_info'), dict) else {} meta_block = raw.get('metadata_info') if isinstance(raw.get('metadata_info'), dict) else {}
# Legacy migration: if no metadata_info but legacy provenance present, adopt it # Legacy migration: if no metadata_info but legacy provenance present, adopt it
if not meta_block and isinstance(raw.get('provenance'), dict): if not meta_block and isinstance(raw.get('provenance'), dict):
meta_block = raw.get('provenance') # type: ignore meta_block = raw.get('provenance')
changed = True changed = True
if force or not meta_block.get('last_backfill'): if force or not meta_block.get('last_backfill'):
meta_block['last_backfill'] = time.strftime('%Y-%m-%dT%H:%M:%S') meta_block['last_backfill'] = time.strftime('%Y-%m-%dT%H:%M:%S')

View file

@@ -41,7 +41,7 @@ SCRIPT_ROOT = Path(__file__).resolve().parent
CODE_ROOT = SCRIPT_ROOT.parent CODE_ROOT = SCRIPT_ROOT.parent
if str(CODE_ROOT) not in sys.path: if str(CODE_ROOT) not in sys.path:
sys.path.insert(0, str(CODE_ROOT)) sys.path.insert(0, str(CODE_ROOT))
from scripts.extract_themes import derive_synergies_for_tags # type: ignore from scripts.extract_themes import derive_synergies_for_tags
ROOT = Path(__file__).resolve().parents[2] ROOT = Path(__file__).resolve().parents[2]
THEME_JSON = ROOT / 'config' / 'themes' / 'theme_list.json' THEME_JSON = ROOT / 'config' / 'themes' / 'theme_list.json'

View file

@@ -18,8 +18,8 @@ ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..'))
if ROOT not in sys.path: if ROOT not in sys.path:
sys.path.insert(0, ROOT) sys.path.insert(0, ROOT)
from code.settings import CSV_DIRECTORY # type: ignore from code.settings import CSV_DIRECTORY
from code.tagging import tag_constants # type: ignore from code.tagging import tag_constants
BASE_COLORS = { BASE_COLORS = {
'white': 'W', 'white': 'W',

View file

@@ -32,7 +32,7 @@ if str(CODE_ROOT) not in sys.path:
sys.path.insert(0, str(CODE_ROOT)) sys.path.insert(0, str(CODE_ROOT))
try: try:
from code.settings import CSV_DIRECTORY as DEFAULT_CSV_DIRECTORY # type: ignore from code.settings import CSV_DIRECTORY as DEFAULT_CSV_DIRECTORY
except Exception: # pragma: no cover - fallback for adhoc execution except Exception: # pragma: no cover - fallback for adhoc execution
DEFAULT_CSV_DIRECTORY = "csv_files" DEFAULT_CSV_DIRECTORY = "csv_files"

View file

@@ -42,7 +42,7 @@ def _sample_combinations(tags: List[str], iterations: int) -> List[Tuple[str | N
def _collect_tag_pool(df: pd.DataFrame) -> List[str]: def _collect_tag_pool(df: pd.DataFrame) -> List[str]:
tag_pool: set[str] = set() tag_pool: set[str] = set()
for tags in df.get("_ltags", []): # type: ignore[assignment] for tags in df.get("_ltags", []):
if not tags: if not tags:
continue continue
for token in tags: for token in tags:

View file

@@ -37,7 +37,7 @@ def _refresh_setup() -> None:
def _refresh_tags() -> None: def _refresh_tags() -> None:
tagger = importlib.import_module("code.tagging.tagger") tagger = importlib.import_module("code.tagging.tagger")
tagger = importlib.reload(tagger) # type: ignore[assignment] tagger = importlib.reload(tagger)
for color in SUPPORTED_COLORS: for color in SUPPORTED_COLORS:
tagger.load_dataframe(color) tagger.load_dataframe(color)

View file

@@ -21,7 +21,7 @@ PROJECT_ROOT = Path(__file__).resolve().parents[1]
if str(PROJECT_ROOT) not in sys.path: if str(PROJECT_ROOT) not in sys.path:
sys.path.append(str(PROJECT_ROOT)) sys.path.append(str(PROJECT_ROOT))
from deck_builder.random_entrypoint import ( # type: ignore # noqa: E402 from deck_builder.random_entrypoint import ( # noqa: E402
_build_random_theme_pool, _build_random_theme_pool,
_ensure_theme_tag_cache, _ensure_theme_tag_cache,
_load_commanders_df, _load_commanders_df,

View file

@@ -731,7 +731,7 @@ def main(): # pragma: no cover (script orchestration)
if cand: if cand:
theme_card_hits[display] = cand theme_card_hits[display] = cand
# Build global duplicate frequency map ONCE (baseline prior to this run) if threshold active # Build global duplicate frequency map ONCE (baseline prior to this run) if threshold active
if args.common_card_threshold > 0 and 'GLOBAL_CARD_FREQ' not in globals(): # type: ignore if args.common_card_threshold > 0 and 'GLOBAL_CARD_FREQ' not in globals():
freq: Dict[str, int] = {} freq: Dict[str, int] = {}
total_themes = 0 total_themes = 0
for fp0 in CATALOG_DIR.glob('*.yml'): for fp0 in CATALOG_DIR.glob('*.yml'):
@@ -748,10 +748,10 @@ def main(): # pragma: no cover (script orchestration)
continue continue
seen_local.add(c) seen_local.add(c)
freq[c] = freq.get(c, 0) + 1 freq[c] = freq.get(c, 0) + 1
globals()['GLOBAL_CARD_FREQ'] = (freq, total_themes) # type: ignore globals()['GLOBAL_CARD_FREQ'] = (freq, total_themes)
# Apply duplicate filtering to candidate lists (do NOT mutate existing example_cards) # Apply duplicate filtering to candidate lists (do NOT mutate existing example_cards)
if args.common_card_threshold > 0 and 'GLOBAL_CARD_FREQ' in globals(): # type: ignore if args.common_card_threshold > 0 and 'GLOBAL_CARD_FREQ' in globals():
freq_map, total_prev = globals()['GLOBAL_CARD_FREQ'] # type: ignore freq_map, total_prev = globals()['GLOBAL_CARD_FREQ']
if total_prev > 0: # avoid div-by-zero if total_prev > 0: # avoid div-by-zero
cutoff = args.common_card_threshold cutoff = args.common_card_threshold
def _filter(lst: List[Tuple[float, str, Set[str]]]) -> List[Tuple[float, str, Set[str]]]: def _filter(lst: List[Tuple[float, str, Set[str]]]) -> List[Tuple[float, str, Set[str]]]:
@ -803,8 +803,8 @@ def main(): # pragma: no cover (script orchestration)
print(f"[promote] modified {changed_count} themes") print(f"[promote] modified {changed_count} themes")
if args.fill_example_cards: if args.fill_example_cards:
print(f"[cards] modified {cards_changed} themes (target {args.cards_target})") print(f"[cards] modified {cards_changed} themes (target {args.cards_target})")
if args.print_dup_metrics and 'GLOBAL_CARD_FREQ' in globals(): # type: ignore if args.print_dup_metrics and 'GLOBAL_CARD_FREQ' in globals():
freq_map, total_prev = globals()['GLOBAL_CARD_FREQ'] # type: ignore freq_map, total_prev = globals()['GLOBAL_CARD_FREQ']
if total_prev: if total_prev:
items = sorted(freq_map.items(), key=lambda x: (-x[1], x[0]))[:30] items = sorted(freq_map.items(), key=lambda x: (-x[1], x[0]))[:30]
print('[dup-metrics] Top shared example_cards (baseline before this run):') print('[dup-metrics] Top shared example_cards (baseline before this run):')

View file

@ -31,9 +31,9 @@ CODE_ROOT = ROOT / 'code'
if str(CODE_ROOT) not in sys.path: if str(CODE_ROOT) not in sys.path:
sys.path.insert(0, str(CODE_ROOT)) sys.path.insert(0, str(CODE_ROOT))
from type_definitions_theme_catalog import ThemeCatalog, ThemeYAMLFile # type: ignore from type_definitions_theme_catalog import ThemeCatalog, ThemeYAMLFile
from scripts.extract_themes import load_whitelist_config # type: ignore from scripts.extract_themes import load_whitelist_config
from scripts.build_theme_catalog import build_catalog # type: ignore from scripts.build_theme_catalog import build_catalog
CATALOG_JSON = ROOT / 'config' / 'themes' / 'theme_list.json' CATALOG_JSON = ROOT / 'config' / 'themes' / 'theme_list.json'

View file

@ -89,11 +89,8 @@ COLUMN_ORDER = CARD_COLUMN_ORDER
TAGGED_COLUMN_ORDER = CARD_COLUMN_ORDER TAGGED_COLUMN_ORDER = CARD_COLUMN_ORDER
REQUIRED_COLUMNS = REQUIRED_CARD_COLUMNS REQUIRED_COLUMNS = REQUIRED_CARD_COLUMNS
MAIN_MENU_ITEMS: List[str] = ['Build A Deck', 'Setup CSV Files', 'Tag CSV Files', 'Quit'] # MAIN_MENU_ITEMS, SETUP_MENU_ITEMS, CSV_DIRECTORY already defined above (lines 67-70)
SETUP_MENU_ITEMS: List[str] = ['Initial Setup', 'Regenerate CSV', 'Main Menu']
CSV_DIRECTORY: str = 'csv_files'
CARD_FILES_DIRECTORY: str = 'card_files' # Parquet files for consolidated card data CARD_FILES_DIRECTORY: str = 'card_files' # Parquet files for consolidated card data
# ---------------------------------------------------------------------------------- # ----------------------------------------------------------------------------------
@ -111,11 +108,7 @@ CARD_FILES_PROCESSED_DIR = os.getenv('CARD_FILES_PROCESSED_DIR', os.path.join(CA
# Set to '1' or 'true' to enable CSV fallback when Parquet loading fails # Set to '1' or 'true' to enable CSV fallback when Parquet loading fails
LEGACY_CSV_COMPAT = os.getenv('LEGACY_CSV_COMPAT', '0').lower() in ('1', 'true', 'on', 'enabled') LEGACY_CSV_COMPAT = os.getenv('LEGACY_CSV_COMPAT', '0').lower() in ('1', 'true', 'on', 'enabled')
# Configuration for handling null/NA values in DataFrame columns # FILL_NA_COLUMNS already defined above (lines 75-78)
FILL_NA_COLUMNS: Dict[str, Optional[str]] = {
'colorIdentity': 'Colorless', # Default color identity for cards without one
'faceName': None # Use card's name column value when face name is not available
}
# ---------------------------------------------------------------------------------- # ----------------------------------------------------------------------------------
# ALL CARDS CONSOLIDATION FEATURE FLAG # ALL CARDS CONSOLIDATION FEATURE FLAG

View file

@ -30,14 +30,14 @@ try:
import logging_util import logging_util
except Exception: except Exception:
# Fallback for direct module loading # Fallback for direct module loading
import importlib.util # type: ignore import importlib.util
root = Path(__file__).resolve().parents[1] root = Path(__file__).resolve().parents[1]
lu_path = root / 'logging_util.py' lu_path = root / 'logging_util.py'
spec = importlib.util.spec_from_file_location('logging_util', str(lu_path)) spec = importlib.util.spec_from_file_location('logging_util', str(lu_path))
mod = importlib.util.module_from_spec(spec) # type: ignore[arg-type] mod = importlib.util.module_from_spec(spec) # type: ignore[arg-type]
assert spec and spec.loader assert spec and spec.loader
spec.loader.exec_module(mod) # type: ignore[assignment] spec.loader.exec_module(mod)
logging_util = mod # type: ignore logging_util = mod
logger = logging_util.logging.getLogger(__name__) logger = logging_util.logging.getLogger(__name__)
logger.setLevel(logging_util.LOG_LEVEL) logger.setLevel(logging_util.LOG_LEVEL)

View file

@ -173,7 +173,7 @@ def _merge_summary_recorder(color: str):
def _write_compat_snapshot(df: pd.DataFrame, color: str) -> None: def _write_compat_snapshot(df: pd.DataFrame, color: str) -> None:
try: # type: ignore[name-defined] try:
_DFC_COMPAT_DIR.mkdir(parents=True, exist_ok=True) _DFC_COMPAT_DIR.mkdir(parents=True, exist_ok=True)
path = _DFC_COMPAT_DIR / f"{color}_cards_unmerged.csv" path = _DFC_COMPAT_DIR / f"{color}_cards_unmerged.csv"
df.to_csv(path, index=False) df.to_csv(path, index=False)

View file

@ -173,7 +173,7 @@ def _merge_summary_recorder(color: str):
def _write_compat_snapshot(df: pd.DataFrame, color: str) -> None: def _write_compat_snapshot(df: pd.DataFrame, color: str) -> None:
"""Write DFC compatibility snapshot (diagnostic output, kept as CSV for now).""" """Write DFC compatibility snapshot (diagnostic output, kept as CSV for now)."""
try: # type: ignore[name-defined] try:
_DFC_COMPAT_DIR.mkdir(parents=True, exist_ok=True) _DFC_COMPAT_DIR.mkdir(parents=True, exist_ok=True)
path = _DFC_COMPAT_DIR / f"{color}_cards_unmerged.csv" path = _DFC_COMPAT_DIR / f"{color}_cards_unmerged.csv"
df.to_csv(path, index=False) # M3: Kept as CSV (diagnostic only, not main data flow) df.to_csv(path, index=False) # M3: Kept as CSV (diagnostic only, not main data flow)

View file

@ -11,9 +11,9 @@ def _load_applier():
root = Path(__file__).resolve().parents[2] root = Path(__file__).resolve().parents[2]
mod_path = root / 'code' / 'tagging' / 'bracket_policy_applier.py' mod_path = root / 'code' / 'tagging' / 'bracket_policy_applier.py'
spec = importlib.util.spec_from_file_location('bracket_policy_applier', str(mod_path)) spec = importlib.util.spec_from_file_location('bracket_policy_applier', str(mod_path))
mod = importlib.util.module_from_spec(spec) # type: ignore[arg-type] mod = importlib.util.module_from_spec(spec)
assert spec and spec.loader assert spec and spec.loader
spec.loader.exec_module(mod) # type: ignore[assignment] spec.loader.exec_module(mod)
return mod return mod

View file

@ -30,8 +30,8 @@ def test_card_index_color_identity_list_handles_edge_cases(tmp_path, monkeypatch
csv_path = write_csv(tmp_path) csv_path = write_csv(tmp_path)
monkeypatch.setenv("CARD_INDEX_EXTRA_CSV", str(csv_path)) monkeypatch.setenv("CARD_INDEX_EXTRA_CSV", str(csv_path))
# Force rebuild # Force rebuild
card_index._CARD_INDEX.clear() # type: ignore card_index._CARD_INDEX.clear()
card_index._CARD_INDEX_MTIME = None # type: ignore card_index._CARD_INDEX_MTIME = None
card_index.maybe_build_index() card_index.maybe_build_index()
pool = card_index.get_tag_pool("Blink") pool = card_index.get_tag_pool("Blink")

View file

@ -8,7 +8,7 @@ from urllib.parse import parse_qs, urlparse
import pytest import pytest
from fastapi.testclient import TestClient from fastapi.testclient import TestClient
from code.web.app import app # type: ignore from code.web.app import app
from code.web.services.commander_catalog_loader import clear_commander_catalog_cache from code.web.services.commander_catalog_loader import clear_commander_catalog_cache

View file

@ -5,7 +5,7 @@ from pathlib import Path
import pytest import pytest
from fastapi.testclient import TestClient from fastapi.testclient import TestClient
from code.web.app import app # type: ignore from code.web.app import app
from code.web.services import telemetry from code.web.services import telemetry
from code.web.services.commander_catalog_loader import clear_commander_catalog_cache from code.web.services.commander_catalog_loader import clear_commander_catalog_cache

View file

@ -7,7 +7,7 @@ from types import SimpleNamespace
import pytest import pytest
from fastapi.testclient import TestClient from fastapi.testclient import TestClient
from code.web.app import app # type: ignore from code.web.app import app
from code.web.routes import commanders from code.web.routes import commanders
from code.web.services import commander_catalog_loader from code.web.services import commander_catalog_loader
from code.web.services.commander_catalog_loader import clear_commander_catalog_cache, load_commander_catalog from code.web.services.commander_catalog_loader import clear_commander_catalog_cache, load_commander_catalog

View file

@ -24,7 +24,7 @@ def load_app_with_env(**env: str) -> types.ModuleType:
os.environ.pop(key, None) os.environ.pop(key, None)
for k, v in env.items(): for k, v in env.items():
os.environ[k] = v os.environ[k] = v
import code.web.app as app_module # type: ignore import code.web.app as app_module
importlib.reload(app_module) importlib.reload(app_module)
return app_module return app_module

View file

@ -50,7 +50,7 @@ def _load_catalog() -> Dict[str, Any]:
def test_deterministic_build_under_seed(): def test_deterministic_build_under_seed():
# Import build after setting seed env # Import build after setting seed env
os.environ['EDITORIAL_SEED'] = '999' os.environ['EDITORIAL_SEED'] = '999'
from scripts.build_theme_catalog import build_catalog # type: ignore from scripts.build_theme_catalog import build_catalog
first = build_catalog(limit=0, verbose=False) first = build_catalog(limit=0, verbose=False)
second = build_catalog(limit=0, verbose=False) second = build_catalog(limit=0, verbose=False)
# Drop volatile metadata_info/timestamp fields before comparison # Drop volatile metadata_info/timestamp fields before comparison
@ -106,7 +106,7 @@ def test_metadata_info_block_coverage():
def test_synergy_commanders_exclusion_of_examples(): def test_synergy_commanders_exclusion_of_examples():
import yaml # type: ignore import yaml
pattern = re.compile(r" - Synergy \(.*\)$") pattern = re.compile(r" - Synergy \(.*\)$")
violations: List[str] = [] violations: List[str] = []
for p in CATALOG_DIR.glob('*.yml'): for p in CATALOG_DIR.glob('*.yml'):
@ -128,7 +128,7 @@ def test_synergy_commanders_exclusion_of_examples():
def test_mapping_trigger_specialization_guard(): def test_mapping_trigger_specialization_guard():
import yaml # type: ignore import yaml
assert MAPPING.exists(), "description_mapping.yml missing" assert MAPPING.exists(), "description_mapping.yml missing"
mapping_yaml = yaml.safe_load(MAPPING.read_text(encoding='utf-8')) or [] mapping_yaml = yaml.safe_load(MAPPING.read_text(encoding='utf-8')) or []
triggers: Set[str] = set() triggers: Set[str] = set()

View file

@ -20,7 +20,7 @@ def load_app_with_env(**env: str) -> types.ModuleType:
os.environ.pop(key, None) os.environ.pop(key, None)
for k, v in env.items(): for k, v in env.items():
os.environ[k] = v os.environ[k] = v
import code.web.app as app_module # type: ignore import code.web.app as app_module
importlib.reload(app_module) importlib.reload(app_module)
return app_module return app_module

View file

@ -14,7 +14,7 @@ class DummyBuilder(ReportingMixin):
self.card_library = card_library self.card_library = card_library
self.color_identity = colors self.color_identity = colors
self.output_lines: List[str] = [] self.output_lines: List[str] = []
self.output_func = self.output_lines.append # type: ignore[assignment] self.output_func = self.output_lines.append
self._full_cards_df = None self._full_cards_df = None
self._combined_cards_df = None self._combined_cards_df = None
self.include_exclude_diagnostics = None self.include_exclude_diagnostics = None

View file

@ -20,7 +20,7 @@ def _stub_modal_matrix(builder: DeckBuilder) -> None:
"Forest": {"G": 1}, "Forest": {"G": 1},
} }
builder._compute_color_source_matrix = MethodType(fake_matrix, builder) # type: ignore[attr-defined] builder._compute_color_source_matrix = MethodType(fake_matrix, builder)
def test_modal_dfc_swaps_basic_when_enabled(): def test_modal_dfc_swaps_basic_when_enabled():

View file

@ -18,7 +18,7 @@ def test_multicopy_clamp_trims_current_stage_additions_only():
# Preseed 95 cards in the library # Preseed 95 cards in the library
b.card_library = {"Filler": {"Count": 95, "Role": "Test", "SubRole": "", "AddedBy": "Test"}} b.card_library = {"Filler": {"Count": 95, "Role": "Test", "SubRole": "", "AddedBy": "Test"}}
# Set a multi-copy selection that would exceed 100 by 15 # Set a multi-copy selection that would exceed 100 by 15
b._web_multi_copy = { # type: ignore[attr-defined] b._web_multi_copy = {
"id": "persistent_petitioners", "id": "persistent_petitioners",
"name": "Persistent Petitioners", "name": "Persistent Petitioners",
"count": 20, "count": 20,

View file

@ -23,7 +23,7 @@ def test_petitioners_clamp_to_100_and_reduce_creature_slots():
"card_advantage": 8, "protection": 4, "card_advantage": 8, "protection": 4,
} }
# Thread multi-copy selection for Petitioners as a creature archetype # Thread multi-copy selection for Petitioners as a creature archetype
b._web_multi_copy = { # type: ignore[attr-defined] b._web_multi_copy = {
"id": "persistent_petitioners", "id": "persistent_petitioners",
"name": "Persistent Petitioners", "name": "Persistent Petitioners",
"count": 40, # intentionally large to trigger clamp/adjustments "count": 40, # intentionally large to trigger clamp/adjustments

View file

@ -17,7 +17,7 @@ def _minimal_ctx(selection: dict):
b = DeckBuilder(output_func=out, input_func=lambda *_: "", headless=True) b = DeckBuilder(output_func=out, input_func=lambda *_: "", headless=True)
# Thread selection and ensure empty library # Thread selection and ensure empty library
b._web_multi_copy = selection # type: ignore[attr-defined] b._web_multi_copy = selection
b.card_library = {} b.card_library = {}
ctx = { ctx = {

View file

@ -1,7 +1,7 @@
import importlib import importlib
import pytest import pytest
try: try:
from starlette.testclient import TestClient # type: ignore from starlette.testclient import TestClient
except Exception: # pragma: no cover - optional dep in CI except Exception: # pragma: no cover - optional dep in CI
TestClient = None # type: ignore TestClient = None # type: ignore

View file

@ -128,7 +128,7 @@ def _make_request(path: str = "/api/partner/suggestions", query_string: str = ""
"client": ("203.0.113.5", 52345), "client": ("203.0.113.5", 52345),
"server": ("testserver", 80), "server": ("testserver", 80),
} }
request = Request(scope, receive=_receive) # type: ignore[arg-type] request = Request(scope, receive=_receive)
request.state.request_id = "req-telemetry" request.state.request_id = "req-telemetry"
return request return request
@ -197,21 +197,21 @@ def test_load_dataset_refresh_retries_after_prior_failure(tmp_path: Path, monkey
from code.web.services import orchestrator as orchestrator_service from code.web.services import orchestrator as orchestrator_service
original_default = partner_service.DEFAULT_DATASET_PATH original_default = partner_service.DEFAULT_DATASET_PATH
original_path = partner_service._DATASET_PATH # type: ignore[attr-defined] original_path = partner_service._DATASET_PATH
original_cache = partner_service._DATASET_CACHE # type: ignore[attr-defined] original_cache = partner_service._DATASET_CACHE
original_attempted = partner_service._DATASET_REFRESH_ATTEMPTED # type: ignore[attr-defined] original_attempted = partner_service._DATASET_REFRESH_ATTEMPTED
partner_service.DEFAULT_DATASET_PATH = dataset_path partner_service.DEFAULT_DATASET_PATH = dataset_path
partner_service._DATASET_PATH = dataset_path # type: ignore[attr-defined] partner_service._DATASET_PATH = dataset_path
partner_service._DATASET_CACHE = None # type: ignore[attr-defined] partner_service._DATASET_CACHE = None
partner_service._DATASET_REFRESH_ATTEMPTED = True # type: ignore[attr-defined] partner_service._DATASET_REFRESH_ATTEMPTED = True
calls = {"count": 0} calls = {"count": 0}
payload_path = tmp_path / "seed_dataset.json" payload_path = tmp_path / "seed_dataset.json"
_write_dataset(payload_path) _write_dataset(payload_path)
def seeded_refresh(out_func=None, *, force=False, root=None): # type: ignore[override] def seeded_refresh(out_func=None, *, force=False, root=None):
calls["count"] += 1 calls["count"] += 1
dataset_path.write_text(payload_path.read_text(encoding="utf-8"), encoding="utf-8") dataset_path.write_text(payload_path.read_text(encoding="utf-8"), encoding="utf-8")
@ -227,9 +227,9 @@ def test_load_dataset_refresh_retries_after_prior_failure(tmp_path: Path, monkey
assert calls["count"] == 1 assert calls["count"] == 1
finally: finally:
partner_service.DEFAULT_DATASET_PATH = original_default partner_service.DEFAULT_DATASET_PATH = original_default
partner_service._DATASET_PATH = original_path # type: ignore[attr-defined] partner_service._DATASET_PATH = original_path
partner_service._DATASET_CACHE = original_cache # type: ignore[attr-defined] partner_service._DATASET_CACHE = original_cache
partner_service._DATASET_REFRESH_ATTEMPTED = original_attempted # type: ignore[attr-defined] partner_service._DATASET_REFRESH_ATTEMPTED = original_attempted
try: try:
dataset_path.unlink() dataset_path.unlink()
except FileNotFoundError: except FileNotFoundError:

View file

@ -33,7 +33,7 @@ def _invoke_helper(
) -> list[tuple[list[str], str]]: ) -> list[tuple[list[str], str]]:
calls: list[tuple[list[str], str]] = [] calls: list[tuple[list[str], str]] = []
def _fake_run(cmd, check=False, cwd=None): # type: ignore[no-untyped-def] def _fake_run(cmd, check=False, cwd=None):
calls.append((list(cmd), cwd)) calls.append((list(cmd), cwd))
class _Completed: class _Completed:
returncode = 0 returncode = 0

View file

@ -10,7 +10,7 @@ fastapi = pytest.importorskip("fastapi")
def load_app_with_env(**env: str) -> types.ModuleType: def load_app_with_env(**env: str) -> types.ModuleType:
for k,v in env.items(): for k,v in env.items():
os.environ[k] = v os.environ[k] = v
import code.web.app as app_module # type: ignore import code.web.app as app_module
importlib.reload(app_module) importlib.reload(app_module)
return app_module return app_module

View file

@ -1,7 +1,7 @@
import json import json
from fastapi.testclient import TestClient from fastapi.testclient import TestClient
from code.web.app import app # type: ignore from code.web.app import app
def test_preview_includes_curated_examples_regression(): def test_preview_includes_curated_examples_regression():

View file

@ -1,8 +1,8 @@
import os import os
from code.web.services.theme_preview import get_theme_preview, bust_preview_cache # type: ignore from code.web.services.theme_preview import get_theme_preview, bust_preview_cache
from code.web.services import preview_cache as pc # type: ignore from code.web.services import preview_cache as pc
from code.web.services.preview_metrics import preview_metrics # type: ignore from code.web.services.preview_metrics import preview_metrics
def _prime(slug: str, limit: int = 12, hits: int = 0, *, colors=None): def _prime(slug: str, limit: int = 12, hits: int = 0, *, colors=None):
@ -89,7 +89,7 @@ def test_env_weight_override(monkeypatch):
bust_preview_cache() bust_preview_cache()
# Clear module-level caches for weights # Clear module-level caches for weights
if hasattr(pc, '_EVICT_WEIGHTS_CACHE'): if hasattr(pc, '_EVICT_WEIGHTS_CACHE'):
pc._EVICT_WEIGHTS_CACHE = None # type: ignore pc._EVICT_WEIGHTS_CACHE = None
# Create two entries: one older with many hits, one fresh with none. # Create two entries: one older with many hits, one fresh with none.
_prime('Blink', limit=6, hits=6, colors=None) # older hot entry _prime('Blink', limit=6, hits=6, colors=None) # older hot entry
old_key = next(iter(pc.PREVIEW_CACHE.keys())) old_key = next(iter(pc.PREVIEW_CACHE.keys()))

View file

@ -1,6 +1,6 @@
import os import os
from code.web.services.theme_preview import get_theme_preview, bust_preview_cache # type: ignore from code.web.services.theme_preview import get_theme_preview, bust_preview_cache
from code.web.services import preview_cache as pc # type: ignore from code.web.services import preview_cache as pc
def test_basic_low_score_eviction(monkeypatch): def test_basic_low_score_eviction(monkeypatch):
@ -17,7 +17,7 @@ def test_basic_low_score_eviction(monkeypatch):
get_theme_preview('Blink', limit=6, colors=c) get_theme_preview('Blink', limit=6, colors=c)
# Cache limit 5, inserted 6 distinct -> eviction should have occurred # Cache limit 5, inserted 6 distinct -> eviction should have occurred
assert len(pc.PREVIEW_CACHE) <= 5 assert len(pc.PREVIEW_CACHE) <= 5
from code.web.services.preview_metrics import preview_metrics # type: ignore from code.web.services.preview_metrics import preview_metrics
m = preview_metrics() m = preview_metrics()
assert m['preview_cache_evictions'] >= 1, 'Expected at least one eviction' assert m['preview_cache_evictions'] >= 1, 'Expected at least one eviction'
assert m['preview_cache_evictions_by_reason'].get('low_score', 0) >= 1 assert m['preview_cache_evictions_by_reason'].get('low_score', 0) >= 1

View file

@ -1,5 +1,5 @@
from fastapi.testclient import TestClient from fastapi.testclient import TestClient
from code.web.app import app # type: ignore from code.web.app import app
def test_minimal_variant_hides_controls_and_headers(): def test_minimal_variant_hides_controls_and_headers():

View file

@ -8,7 +8,7 @@ pytestmark = pytest.mark.skip(reason="M4: preview_perf_benchmark module removed
def test_fetch_all_theme_slugs_retries(monkeypatch): def test_fetch_all_theme_slugs_retries(monkeypatch):
calls = {"count": 0} calls = {"count": 0}
def fake_fetch(url): # type: ignore[override] def fake_fetch(url):
calls["count"] += 1 calls["count"] += 1
if calls["count"] == 1: if calls["count"] == 1:
raise RuntimeError("transient 500") raise RuntimeError("transient 500")
@ -27,7 +27,7 @@ def test_fetch_all_theme_slugs_retries(monkeypatch):
def test_fetch_all_theme_slugs_page_level_retry(monkeypatch): def test_fetch_all_theme_slugs_page_level_retry(monkeypatch):
calls = {"count": 0} calls = {"count": 0}
def fake_fetch_with_retry(url, attempts=3, delay=0.6): # type: ignore[override] def fake_fetch_with_retry(url, attempts=3, delay=0.6):
calls["count"] += 1 calls["count"] += 1
if calls["count"] < 3: if calls["count"] < 3:
raise RuntimeError("service warming up") raise RuntimeError("service warming up")

View file

@ -1,5 +1,5 @@
from fastapi.testclient import TestClient from fastapi.testclient import TestClient
from code.web.app import app # type: ignore from code.web.app import app
def test_preview_fragment_suppress_curated_removes_examples(): def test_preview_fragment_suppress_curated_removes_examples():

View file

@ -3,16 +3,16 @@ from code.web.services import preview_cache as pc
def _force_interval_elapsed(): def _force_interval_elapsed():
# Ensure adaptation interval guard passes # Ensure adaptation interval guard passes
if pc._LAST_ADAPT_AT is not None: # type: ignore[attr-defined] if pc._LAST_ADAPT_AT is not None:
pc._LAST_ADAPT_AT -= (pc._ADAPT_INTERVAL_S + 1) # type: ignore[attr-defined] pc._LAST_ADAPT_AT -= (pc._ADAPT_INTERVAL_S + 1)
def test_ttl_adapts_down_and_up(capsys): def test_ttl_adapts_down_and_up(capsys):
# Enable adaptation regardless of env # Enable adaptation regardless of env
pc._ADAPTATION_ENABLED = True # type: ignore[attr-defined] pc._ADAPTATION_ENABLED = True
pc.TTL_SECONDS = pc._TTL_BASE # type: ignore[attr-defined] pc.TTL_SECONDS = pc._TTL_BASE
pc._RECENT_HITS.clear() # type: ignore[attr-defined] pc._RECENT_HITS.clear()
pc._LAST_ADAPT_AT = None # type: ignore[attr-defined] pc._LAST_ADAPT_AT = None
# Low hit ratio pattern (~0.1) # Low hit ratio pattern (~0.1)
for _ in range(72): for _ in range(72):
@ -23,11 +23,11 @@ def test_ttl_adapts_down_and_up(capsys):
out1 = capsys.readouterr().out out1 = capsys.readouterr().out
assert "theme_preview_ttl_adapt" in out1, "expected adaptation log for low hit ratio" assert "theme_preview_ttl_adapt" in out1, "expected adaptation log for low hit ratio"
ttl_after_down = pc.TTL_SECONDS ttl_after_down = pc.TTL_SECONDS
assert ttl_after_down <= pc._TTL_BASE # type: ignore[attr-defined] assert ttl_after_down <= pc._TTL_BASE
# Force interval elapsed & high hit ratio pattern (~0.9) # Force interval elapsed & high hit ratio pattern (~0.9)
_force_interval_elapsed() _force_interval_elapsed()
pc._RECENT_HITS.clear() # type: ignore[attr-defined] pc._RECENT_HITS.clear()
for _ in range(72): for _ in range(72):
pc.record_request_hit(True) pc.record_request_hit(True)
for _ in range(8): for _ in range(8):

View file

@ -19,17 +19,17 @@ def _client_with_flags(window_s: int = 2, limit_random: int = 2, limit_build: in
# Force fresh import so RATE_LIMIT_* constants reflect env # Force fresh import so RATE_LIMIT_* constants reflect env
sys.modules.pop('code.web.app', None) sys.modules.pop('code.web.app', None)
from code.web import app as app_module # type: ignore from code.web import app as app_module
# Force override constants for deterministic test # Force override constants for deterministic test
try: try:
app_module.RATE_LIMIT_ENABLED = True # type: ignore[attr-defined] app_module.RATE_LIMIT_ENABLED = True
app_module.RATE_LIMIT_WINDOW_S = window_s # type: ignore[attr-defined] app_module.RATE_LIMIT_WINDOW_S = window_s
app_module.RATE_LIMIT_RANDOM = limit_random # type: ignore[attr-defined] app_module.RATE_LIMIT_RANDOM = limit_random
app_module.RATE_LIMIT_BUILD = limit_build # type: ignore[attr-defined] app_module.RATE_LIMIT_BUILD = limit_build
app_module.RATE_LIMIT_SUGGEST = limit_suggest # type: ignore[attr-defined] app_module.RATE_LIMIT_SUGGEST = limit_suggest
# Reset in-memory counters # Reset in-memory counters
if hasattr(app_module, '_RL_COUNTS'): if hasattr(app_module, '_RL_COUNTS'):
app_module._RL_COUNTS.clear() # type: ignore[attr-defined] app_module._RL_COUNTS.clear()
except Exception: except Exception:
pass pass
return TestClient(app_module.app) return TestClient(app_module.app)

View file

@ -3,8 +3,8 @@ from pathlib import Path
from fastapi.testclient import TestClient from fastapi.testclient import TestClient
from code.web import app as web_app # type: ignore from code.web import app as web_app
from code.web.app import app # type: ignore from code.web.app import app
# Ensure project root on sys.path for absolute imports # Ensure project root on sys.path for absolute imports
ROOT = Path(__file__).resolve().parents[2] ROOT = Path(__file__).resolve().parents[2]

View file

@ -9,17 +9,17 @@ def setup_module(module): # ensure deterministic env weights
def test_rarity_diminishing(): def test_rarity_diminishing():
# Monkeypatch internal index # Monkeypatch internal index
card_index._CARD_INDEX.clear() # type: ignore card_index._CARD_INDEX.clear()
theme = "Test Theme" theme = "Test Theme"
card_index._CARD_INDEX[theme] = [ # type: ignore card_index._CARD_INDEX[theme] = [
{"name": "Mythic One", "tags": [theme], "color_identity": "G", "mana_cost": "G", "rarity": "mythic"}, {"name": "Mythic One", "tags": [theme], "color_identity": "G", "mana_cost": "G", "rarity": "mythic"},
{"name": "Mythic Two", "tags": [theme], "color_identity": "G", "mana_cost": "G", "rarity": "mythic"}, {"name": "Mythic Two", "tags": [theme], "color_identity": "G", "mana_cost": "G", "rarity": "mythic"},
] ]
def no_build(): def no_build():
return None return None
sampling.maybe_build_index = no_build # type: ignore sampling.maybe_build_index = no_build
cards = sampling.sample_real_cards_for_theme(theme, 2, None, synergies=[theme], commander=None) cards = sampling.sample_real_cards_for_theme(theme, 2, None, synergies=[theme], commander=None)
rarity_weights = [r for c in cards for r in c["reasons"] if r.startswith("rarity_weight_calibrated")] # type: ignore rarity_weights = [r for c in cards for r in c["reasons"] if r.startswith("rarity_weight_calibrated")]
assert len(rarity_weights) >= 2 assert len(rarity_weights) >= 2
v1 = float(rarity_weights[0].split(":")[-1]) v1 = float(rarity_weights[0].split(":")[-1])
v2 = float(rarity_weights[1].split(":")[-1]) v2 = float(rarity_weights[1].split(":")[-1])
@ -40,15 +40,15 @@ def test_commander_overlap_monotonic_diminishing():
def test_splash_off_color_penalty_applied(): def test_splash_off_color_penalty_applied():
card_index._CARD_INDEX.clear() # type: ignore card_index._CARD_INDEX.clear()
theme = "Splash Theme" theme = "Splash Theme"
# Commander W U B R (4 colors) # Commander W U B R (4 colors)
commander = {"name": "CommanderTest", "tags": [theme], "color_identity": "WUBR", "mana_cost": "", "rarity": "mythic"} commander = {"name": "CommanderTest", "tags": [theme], "color_identity": "WUBR", "mana_cost": "", "rarity": "mythic"}
# Card with single off-color G (W U B R G) # Card with single off-color G (W U B R G)
splash_card = {"name": "CardSplash", "tags": [theme], "color_identity": "WUBRG", "mana_cost": "G", "rarity": "rare"} splash_card = {"name": "CardSplash", "tags": [theme], "color_identity": "WUBRG", "mana_cost": "G", "rarity": "rare"}
card_index._CARD_INDEX[theme] = [commander, splash_card] # type: ignore card_index._CARD_INDEX[theme] = [commander, splash_card]
sampling.maybe_build_index = lambda: None # type: ignore sampling.maybe_build_index = lambda: None
cards = sampling.sample_real_cards_for_theme(theme, 2, None, synergies=[theme], commander="CommanderTest") cards = sampling.sample_real_cards_for_theme(theme, 2, None, synergies=[theme], commander="CommanderTest")
splash = next((c for c in cards if c["name"] == "CardSplash"), None) splash = next((c for c in cards if c["name"] == "CardSplash"), None)
assert splash is not None assert splash is not None
assert any(r.startswith("splash_off_color_penalty") for r in splash["reasons"]) # type: ignore assert any(r.startswith("splash_off_color_penalty") for r in splash["reasons"])

View file

@ -1,5 +1,5 @@
import re import re
from code.web.services.theme_preview import get_theme_preview # type: ignore from code.web.services.theme_preview import get_theme_preview
# We can't easily execute the JS normalizeCardName in Python, but we can ensure # We can't easily execute the JS normalizeCardName in Python, but we can ensure
# server-delivered sample names that include appended synergy annotations are not # server-delivered sample names that include appended synergy annotations are not

View file

@ -10,7 +10,7 @@ fastapi = pytest.importorskip("fastapi") # skip if FastAPI missing
def load_app_with_env(**env: str) -> types.ModuleType: def load_app_with_env(**env: str) -> types.ModuleType:
for k, v in env.items(): for k, v in env.items():
os.environ[k] = v os.environ[k] = v
import code.web.app as app_module # type: ignore import code.web.app as app_module
importlib.reload(app_module) importlib.reload(app_module)
return app_module return app_module

View file

@ -2,7 +2,7 @@ import sys
from pathlib import Path from pathlib import Path
import pytest import pytest
from fastapi.testclient import TestClient from fastapi.testclient import TestClient
from code.web.app import app # type: ignore from code.web.app import app
# Ensure project root on sys.path for absolute imports # Ensure project root on sys.path for absolute imports
ROOT = Path(__file__).resolve().parents[2] ROOT = Path(__file__).resolve().parents[2]

View file

@ -146,7 +146,7 @@ def test_generate_theme_catalog_basic(tmp_path: Path, fixed_now: datetime) -> No
assert all(row['last_generated_at'] == result.generated_at for row in rows) assert all(row['last_generated_at'] == result.generated_at for row in rows)
assert all(row['version'] == result.version for row in rows) assert all(row['version'] == result.version for row in rows)
expected_hash = new_catalog._compute_version_hash([row['theme'] for row in rows]) # type: ignore[attr-defined] expected_hash = new_catalog._compute_version_hash([row['theme'] for row in rows])
assert result.version == expected_hash assert result.version == expected_hash

View file

@ -4,7 +4,7 @@ import os
import importlib import importlib
from pathlib import Path from pathlib import Path
from starlette.testclient import TestClient from starlette.testclient import TestClient
from code.type_definitions_theme_catalog import ThemeCatalog # type: ignore from code.type_definitions_theme_catalog import ThemeCatalog
CATALOG_PATH = Path('config/themes/theme_list.json') CATALOG_PATH = Path('config/themes/theme_list.json')

View file

@ -8,7 +8,7 @@ def test_theme_list_json_validates_against_pydantic_and_fast_path():
raw = json.loads(p.read_text(encoding='utf-8')) raw = json.loads(p.read_text(encoding='utf-8'))
# Pydantic validation # Pydantic validation
from code.type_definitions_theme_catalog import ThemeCatalog # type: ignore from code.type_definitions_theme_catalog import ThemeCatalog
catalog = ThemeCatalog(**raw) catalog = ThemeCatalog(**raw)
assert isinstance(catalog.themes, list) and len(catalog.themes) > 0 assert isinstance(catalog.themes, list) and len(catalog.themes) > 0
# Basic fields exist on entries # Basic fields exist on entries

View file

@ -36,7 +36,7 @@ from fastapi.testclient import TestClient
def _get_app(): # local import to avoid heavy import cost if file unused def _get_app(): # local import to avoid heavy import cost if file unused
from code.web.app import app # type: ignore from code.web.app import app
return app return app
@ -115,13 +115,13 @@ def test_preview_cache_hit_timing(monkeypatch, client):
r1 = client.get(f"/themes/fragment/preview/{theme_id}?limit=12") r1 = client.get(f"/themes/fragment/preview/{theme_id}?limit=12")
assert r1.status_code == 200 assert r1.status_code == 200
# Monkeypatch theme_preview._now to freeze time so second call counts as hit # Monkeypatch theme_preview._now to freeze time so second call counts as hit
import code.web.services.theme_preview as tp # type: ignore import code.web.services.theme_preview as tp
orig_now = tp._now orig_now = tp._now
monkeypatch.setattr(tp, "_now", lambda: orig_now()) monkeypatch.setattr(tp, "_now", lambda: orig_now())
r2 = client.get(f"/themes/fragment/preview/{theme_id}?limit=12") r2 = client.get(f"/themes/fragment/preview/{theme_id}?limit=12")
assert r2.status_code == 200 assert r2.status_code == 200
# Deterministic service-level verification: second direct function call should short-circuit via cache # Deterministic service-level verification: second direct function call should short-circuit via cache
import code.web.services.theme_preview as tp # type: ignore import code.web.services.theme_preview as tp
# Snapshot counters # Snapshot counters
pre_hits = getattr(tp, "_PREVIEW_CACHE_HITS", 0) pre_hits = getattr(tp, "_PREVIEW_CACHE_HITS", 0)
first_payload = tp.get_theme_preview(theme_id, limit=12) first_payload = tp.get_theme_preview(theme_id, limit=12)

View file

@ -16,7 +16,7 @@ def _new_client(prewarm: bool = False) -> TestClient:
# Remove existing module (if any) so lifespan runs again # Remove existing module (if any) so lifespan runs again
if 'code.web.app' in list(importlib.sys.modules.keys()): if 'code.web.app' in list(importlib.sys.modules.keys()):
importlib.sys.modules.pop('code.web.app') importlib.sys.modules.pop('code.web.app')
from code.web.app import app # type: ignore from code.web.app import app
return TestClient(app) return TestClient(app)

View file

@ -2,8 +2,8 @@ from __future__ import annotations
import pytest import pytest
from code.web.services.theme_preview import get_theme_preview # type: ignore from code.web.services.theme_preview import get_theme_preview
from code.web.services.theme_catalog_loader import load_index, slugify, project_detail # type: ignore from code.web.services.theme_catalog_loader import load_index, slugify, project_detail
@pytest.mark.parametrize("limit", [8, 12]) @pytest.mark.parametrize("limit", [8, 12])

View file

@ -1,7 +1,7 @@
import os import os
import time import time
import json import json
from code.web.services.theme_preview import get_theme_preview, preview_metrics, bust_preview_cache # type: ignore from code.web.services.theme_preview import get_theme_preview, preview_metrics, bust_preview_cache
def test_colors_filter_constraint_green_subset(): def test_colors_filter_constraint_green_subset():

View file

@ -47,10 +47,10 @@ class DummySpellBuilder(SpellAdditionMixin):
def rng(self) -> DummyRNG: def rng(self) -> DummyRNG:
return self._rng return self._rng
def get_theme_context(self) -> ThemeContext: # type: ignore[override] def get_theme_context(self) -> ThemeContext:
return self._theme_context return self._theme_context
def add_card(self, name: str, **kwargs: Any) -> None: # type: ignore[override] def add_card(self, name: str, **kwargs: Any) -> None:
self.card_library[name] = {"Count": kwargs.get("count", 1)} self.card_library[name] = {"Count": kwargs.get("count", 1)}
self.added_cards.append(name) self.added_cards.append(name)

View file

@ -20,7 +20,7 @@ def _fresh_client() -> TestClient:
from code.web.services.commander_catalog_loader import clear_commander_catalog_cache from code.web.services.commander_catalog_loader import clear_commander_catalog_cache
clear_commander_catalog_cache() clear_commander_catalog_cache()
from code.web.app import app # type: ignore from code.web.app import app
client = TestClient(app) client = TestClient(app)
from code.web.services import tasks from code.web.services import tasks

View file

@ -8,7 +8,7 @@ from pathlib import Path
def get_unused_ignores(): def get_unused_ignores():
"""Run mypy and extract all unused-ignore errors.""" """Run mypy and extract all unused-ignore errors."""
result = subprocess.run( result = subprocess.run(
['python', '-m', 'mypy', 'code/web/', '--show-error-codes'], ['python', '-m', 'mypy', 'code', '--show-error-codes'],
capture_output=True, capture_output=True,
text=True, text=True,
cwd=Path(__file__).parent cwd=Path(__file__).parent