Mirror of https://github.com/mwisnowski/mtg_python_deckbuilder.git (synced 2025-12-16 07:30:13 +01:00)
fix(lint): improved type checking and code maintainability
parent: 83fe527979
commit: 40023e93b8
62 changed files with 187 additions and 197 deletions

CHANGELOG.md (23 changed lines)
@@ -9,9 +9,9 @@ This format follows Keep a Changelog principles and aims for Semantic Versioning
 ## [Unreleased]

 ### Added
-- **Code Quality Improvements**: Enhanced type checking and code quality standards
-  - Configured gradual strict mode for Python type checking
-  - Created automated utilities for maintaining clean codebase
+- **Code Quality Tools**: Enhanced development tooling for maintainability
+  - Automated utilities for code cleanup
+  - Improved type checking configuration
 - **Card Image Caching**: Optional local image cache for faster card display
   - Downloads card images from Scryfall bulk data (respects API guidelines)
   - Graceful fallback to Scryfall API for uncached images
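The caching behaviour described in the "Card Image Caching" bullets above can be illustrated with a small sketch. This is not the project's implementation: the cache directory, file naming, and the use of Scryfall's `cards/named` endpoint for the fallback path are assumptions; the changelog only states that images come from Scryfall bulk data with a graceful API fallback.

```python
# Illustrative sketch only: cache location and helper name are assumptions, not repo code.
from pathlib import Path

import requests

CACHE_DIR = Path("card_images")  # assumed local cache directory


def get_card_image(card_name: str) -> bytes:
    """Return image bytes from the local cache, falling back to the Scryfall API."""
    CACHE_DIR.mkdir(parents=True, exist_ok=True)
    cached = CACHE_DIR / f"{card_name.lower().replace(' ', '_')}.jpg"
    if cached.is_file():
        return cached.read_bytes()  # cache hit: no network call
    # Cache miss: resolve the card via Scryfall, then fetch its image.
    # (Single-faced cards only for brevity; a real cache would prefer bulk-data files.)
    resp = requests.get(
        "https://api.scryfall.com/cards/named",
        params={"exact": card_name},
        timeout=10,
    )
    resp.raise_for_status()
    image_url = resp.json()["image_uris"]["normal"]
    image = requests.get(image_url, timeout=10).content
    cached.write_bytes(image)  # populate the cache for next time
    return image
```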
@@ -76,22 +76,19 @@ This format follows Keep a Changelog principles and aims for Semantic Versioning
 ### Changed
 - Migrated 5 templates to new component system (home, 404, 500, setup, commanders)
-- **Type Checking Configuration**: Adjusted mypy settings for better developer experience
-  - Enabled gradual strict mode for incremental type safety improvements
-  - Configured per-module strict checks for new code
+- **Type Checking Configuration**: Improved Python code quality tooling
+  - Configured type checker for better error detection
+  - Optimized linting rules for development workflow

 ### Fixed
-- **Code Quality**: Resolved numerous type checking warnings and improved code maintainability
-  - Fixed critical type annotation bugs
-  - Removed outdated type ignore comments
-  - Corrected dictionary type definitions
+- **Code Quality**: Resolved type checking warnings and improved code maintainability
+  - Fixed type annotation inconsistencies
+  - Cleaned up redundant code quality suppressions
+  - Corrected configuration conflicts

 ### Removed
 _None_

 ### Fixed
 _None_

 ### Performance
 - Hot reload for CSS/template changes (no Docker rebuild needed)
 - Optional image caching reduces Scryfall API calls
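The "automated utilities" and "cleaned up redundant code quality suppressions" entries line up with the helper visible at the very end of this diff, which runs mypy and collects unused-ignore diagnostics. A hedged sketch of that idea; the parsing and return shape below are assumptions, not the repository's code.

```python
# Sketch of an "unused ignore" scanner in the spirit of the helper shown later in this diff.
import re
import subprocess

# mypy with warn_unused_ignores emits: path.py:123: error: Unused "type: ignore" comment
UNUSED_RE = re.compile(r'^(?P<path>[^:]+):(?P<line>\d+): error: Unused "type: ignore" comment')


def find_unused_ignores(target: str = "code") -> list[tuple[str, int]]:
    """Run mypy and return (path, line) pairs whose ignore comment no longer suppresses anything."""
    result = subprocess.run(
        ["python", "-m", "mypy", target, "--show-error-codes"],
        capture_output=True,
        text=True,
    )
    hits: list[tuple[str, int]] = []
    for line in result.stdout.splitlines():
        match = UNUSED_RE.match(line)
        if match:
            hits.append((match.group("path"), int(match.group("line"))))
    return hits
```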
@@ -3,12 +3,12 @@
 ## [Unreleased]

 ### Summary
-Web UI improvements with Tailwind CSS migration, TypeScript conversion, component library, enhanced code quality standards, and optional card image caching for faster performance and better maintainability.
+Web UI improvements with Tailwind CSS migration, TypeScript conversion, component library, enhanced code quality tools, and optional card image caching for faster performance and better maintainability.

 ### Added
-- **Code Quality Improvements**: Enhanced type checking and code quality standards
-  - Configured gradual strict mode for Python type checking
-  - Created automated utilities for maintaining clean codebase
+- **Code Quality Tools**: Enhanced development tooling for maintainability
+  - Automated utilities for code cleanup
+  - Improved type checking configuration
 - **Card Image Caching**: Optional local image cache for faster card display
   - Downloads card images from Scryfall bulk data (respects API guidelines)
   - Graceful fallback to Scryfall API for uncached images
@@ -51,9 +51,9 @@ Web UI improvements with Tailwind CSS migration, TypeScript conversion, componen
 - Hot reload enabled for templates and static files
 - Volume mounts for rapid iteration without rebuilds
 - **Template Modernization**: Migrated templates to use component system
-- **Type Checking Configuration**: Adjusted mypy settings for better developer experience
-  - Enabled gradual strict mode for incremental type safety improvements
-  - Configured per-module strict checks for new code
+- **Type Checking Configuration**: Improved Python code quality tooling
+  - Configured type checker for better error detection
+  - Optimized linting rules for development workflow
 - **Intelligent Synergy Builder**: Analyze multiple builds and create optimized "best-of" deck
   - Scores cards by frequency (50%), EDHREC rank (25%), and theme tags (25%)
   - 10% bonus for cards appearing in 80%+ of builds
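The synergy scoring in the context lines above is concrete enough to sketch. Only the weights (50/25/25) and the 10% bonus at 80%+ appearance come from the changelog; the normalisation of EDHREC rank and theme-tag overlap below is assumed for illustration.

```python
# Sketch of the weighting described above; normalisation choices are assumptions.
def synergy_score(appearances: int, total_builds: int, edhrec_rank: int,
                  matching_tags: int, max_tags: int, worst_rank: int = 100_000) -> float:
    freq = appearances / total_builds                        # 0..1, share of builds using the card
    rank = 1.0 - min(edhrec_rank, worst_rank) / worst_rank   # 0..1, lower (better) rank scores higher
    tags = matching_tags / max_tags if max_tags else 0.0     # 0..1, overlap with the deck's theme tags
    score = 0.5 * freq + 0.25 * rank + 0.25 * tags
    if freq >= 0.8:                                          # 10% bonus for near-ubiquitous cards
        score *= 1.10
    return score
```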
@@ -78,10 +78,10 @@ Web UI improvements with Tailwind CSS migration, TypeScript conversion, componen
 _None_

 ### Fixed
-- **Code Quality**: Resolved numerous type checking warnings and improved code maintainability
-  - Fixed critical type annotation bugs
-  - Removed outdated type ignore comments
-  - Corrected dictionary type definitions
+- **Code Quality**: Resolved type checking warnings and improved code maintainability
+  - Fixed type annotation inconsistencies
+  - Cleaned up redundant code quality suppressions
+  - Corrected configuration conflicts

 ### Performance
 - Hot reload for CSS/template changes (no Docker rebuild needed)
@@ -88,12 +88,12 @@ def _candidate_pool_for_role(builder, role: str) -> List[Tuple[str, dict]]:
     # Sort by edhrecRank then manaValue
     try:
         from . import builder_utils as bu
-        sorted_df = bu.sort_by_priority(pool, ["edhrecRank", "manaValue"])  # type: ignore[attr-defined]
+        sorted_df = bu.sort_by_priority(pool, ["edhrecRank", "manaValue"])
         # Prefer-owned bias
         if getattr(builder, "prefer_owned", False):
             owned = getattr(builder, "owned_card_names", None)
             if owned:
-                sorted_df = bu.prefer_owned_first(sorted_df, {str(n).lower() for n in owned})  # type: ignore[attr-defined]
+                sorted_df = bu.prefer_owned_first(sorted_df, {str(n).lower() for n in owned})
     except Exception:
         sorted_df = pool
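The Python hunks that follow all apply the same mechanical change: a `# type: ignore[...]` comment that mypy now reports as unused is deleted while the statement itself stays untouched. A minimal, self-contained illustration (hypothetical class name) of why such a comment becomes removable once the attribute is actually typed:

```python
# With warn_unused_ignores enabled, mypy flags the stale suppression below as
# 'Unused "type: ignore" comment', so it can be deleted without changing behaviour.
class Builder:
    headless: bool = False


builder = Builder()
# Before: the suppression is now reported by mypy as an unused ignore.
builder.headless = True  # type: ignore[attr-defined]
# After: the same statement with the stale comment removed.
builder.headless = True
```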
@@ -363,7 +363,7 @@ def enforce_bracket_compliance(builder, mode: str = "prompt") -> Dict:
                 break
     # Rank candidates: break the most combos first; break ties by worst desirability
     cand_names = list(freq.keys())
-    cand_names.sort(key=lambda nm: (-int(freq.get(nm, 0)), _score(nm)), reverse=False)  # type: ignore[arg-type]
+    cand_names.sort(key=lambda nm: (-int(freq.get(nm, 0)), _score(nm)), reverse=False)
     removed_any = False
     for nm in cand_names:
         if nm in blocked:
@@ -17,7 +17,7 @@ from logging_util import get_logger
 logger = get_logger(__name__)

 try:  # Optional pandas import for type checking without heavy dependency at runtime.
-    import pandas as _pd  # type: ignore
+    import pandas as _pd
 except Exception:  # pragma: no cover - tests provide DataFrame-like objects.
     _pd = None  # type: ignore
@@ -267,7 +267,7 @@ def _find_commander_row(df: Any, name: str | None):
     if not target:
         return None

-    if _pd is not None and isinstance(df, _pd.DataFrame):  # type: ignore
+    if _pd is not None and isinstance(df, _pd.DataFrame):
         columns = [col for col in ("name", "faceName") if col in df.columns]
         for col in columns:
             series = df[col].astype(str).str.casefold()
@@ -885,7 +885,7 @@ def _filter_multi(df: pd.DataFrame, primary: Optional[str], secondary: Optional[
         if index_map is None:
             _ensure_theme_tag_index(current_df)
             index_map = current_df.attrs.get("_ltag_index") or {}
-        return index_map  # type: ignore[return-value]
+        return index_map

     index_map_all = _get_index_map(df)
@@ -1047,7 +1047,7 @@ def _check_constraints(candidate_count: int, constraints: Optional[Dict[str, Any
    if not constraints:
        return
    try:
-        req_min = constraints.get("require_min_candidates")  # type: ignore[attr-defined]
+        req_min = constraints.get("require_min_candidates")
    except Exception:
        req_min = None
    if req_min is None:
@@ -1436,7 +1436,7 @@ def build_random_full_deck(
    primary_choice_idx, secondary_choice_idx, tertiary_choice_idx = _resolve_theme_choices_for_headless(base.commander, base)

    try:
-        from headless_runner import run as _run  # type: ignore
+        from headless_runner import run as _run
    except Exception as e:
        return RandomFullBuildResult(
            seed=base.seed,
@@ -1482,7 +1482,7 @@ def build_random_full_deck(
    summary: Dict[str, Any] | None = None
    try:
        if hasattr(builder, 'build_deck_summary'):
-            summary = builder.build_deck_summary()  # type: ignore[attr-defined]
+            summary = builder.build_deck_summary()
    except Exception:
        summary = None
@@ -1559,7 +1559,7 @@ def build_random_full_deck(
    if isinstance(custom_base, str) and custom_base.strip():
        meta_payload["name"] = custom_base.strip()
    try:
-        commander_meta = builder.get_commander_export_metadata()  # type: ignore[attr-defined]
+        commander_meta = builder.get_commander_export_metadata()
    except Exception:
        commander_meta = {}
    names = commander_meta.get("commander_names") or []
@@ -1589,8 +1589,8 @@ def build_random_full_deck(
    try:
        import os as _os
        import json as _json
-        csv_path = getattr(builder, 'last_csv_path', None)  # type: ignore[attr-defined]
-        txt_path = getattr(builder, 'last_txt_path', None)  # type: ignore[attr-defined]
+        csv_path = getattr(builder, 'last_csv_path', None)
+        txt_path = getattr(builder, 'last_txt_path', None)
        if csv_path and isinstance(csv_path, str):
            base_path, _ = _os.path.splitext(csv_path)
            # If txt missing but expected, look for sibling
@@ -1608,7 +1608,7 @@ def build_random_full_deck(
            # Compute compliance if not already saved
            try:
                if hasattr(builder, 'compute_and_print_compliance'):
-                    compliance = builder.compute_and_print_compliance(base_stem=_os.path.basename(base_path))  # type: ignore[attr-defined]
+                    compliance = builder.compute_and_print_compliance(base_stem=_os.path.basename(base_path))
            except Exception:
                compliance = None
            # Write summary sidecar if missing
@@ -1646,7 +1646,7 @@ def build_random_full_deck(
            csv_path = existing_base
            base_path, _ = _os.path.splitext(csv_path)
        else:
-            tmp_csv = builder.export_decklist_csv()  # type: ignore[attr-defined]
+            tmp_csv = builder.export_decklist_csv()
            stem_base, ext = _os.path.splitext(tmp_csv)
            if stem_base.endswith('_1'):
                original = stem_base[:-2] + ext
@@ -1662,13 +1662,13 @@ def build_random_full_deck(
            if _os.path.isfile(target_txt):
                txt_path = target_txt
            else:
-                tmp_txt = builder.export_decklist_text(filename=_os.path.basename(base_path) + '.txt')  # type: ignore[attr-defined]
+                tmp_txt = builder.export_decklist_text(filename=_os.path.basename(base_path) + '.txt')
                if tmp_txt.endswith('_1.txt') and _os.path.isfile(target_txt):
                    txt_path = target_txt
                else:
                    txt_path = tmp_txt
        if hasattr(builder, 'compute_and_print_compliance'):
-            compliance = builder.compute_and_print_compliance(base_stem=_os.path.basename(base_path))  # type: ignore[attr-defined]
+            compliance = builder.compute_and_print_compliance(base_stem=_os.path.basename(base_path))
        if summary:
            sidecar = base_path + '.summary.json'
            if not _os.path.isfile(sidecar):
@@ -183,7 +183,7 @@ def _iter_json_themes(payload: object) -> Iterable[ThemeCatalogEntry]:
    try:
        from type_definitions_theme_catalog import ThemeCatalog  # pragma: no cover - primary import path
    except ImportError:  # pragma: no cover - fallback when running as package
-        from code.type_definitions_theme_catalog import ThemeCatalog  # type: ignore
+        from code.type_definitions_theme_catalog import ThemeCatalog

    try:
        catalog = ThemeCatalog.model_validate(payload)
@@ -40,7 +40,7 @@ from typing import List, Dict, Any

 # Third-party imports (optional)
 try:
-    import inquirer  # type: ignore
+    import inquirer
 except Exception:
     inquirer = None  # Fallback to simple input-based menu when unavailable
 import pandas as pd
@@ -40,7 +40,7 @@ from typing import List, Dict, Any

 # Third-party imports (optional)
 try:
-    import inquirer  # type: ignore
+    import inquirer
 except Exception:
     inquirer = None  # Fallback to simple input-based menu when unavailable
 import pandas as pd
@@ -139,7 +139,7 @@ def _validate_commander_available(command_name: str) -> None:
        return

    try:
-        from commander_exclusions import lookup_commander_detail as _lookup_commander_detail  # type: ignore[import-not-found]
+        from commander_exclusions import lookup_commander_detail as _lookup_commander_detail
    except ImportError:  # pragma: no cover
        _lookup_commander_detail = None
@@ -281,12 +281,12 @@ def run(
    # Optional deterministic seed for Random Modes (does not affect core when unset)
    try:
        if seed is not None:
-            builder.set_seed(seed)  # type: ignore[attr-defined]
+            builder.set_seed(seed)
    except Exception:
        pass
    # Mark this run as headless so builder can adjust exports and logging
    try:
-        builder.headless = True  # type: ignore[attr-defined]
+        builder.headless = True
    except Exception:
        pass
@@ -294,9 +294,9 @@ def run(
    secondary_clean = (secondary_commander or "").strip()
    background_clean = (background or "").strip()
    try:
-        builder.partner_feature_enabled = partner_feature_enabled  # type: ignore[attr-defined]
-        builder.requested_secondary_commander = secondary_clean or None  # type: ignore[attr-defined]
-        builder.requested_background = background_clean or None  # type: ignore[attr-defined]
+        builder.partner_feature_enabled = partner_feature_enabled
+        builder.requested_secondary_commander = secondary_clean or None
+        builder.requested_background = background_clean or None
    except Exception:
        pass
@@ -313,11 +313,11 @@ def run(

    # Configure include/exclude settings (M1: Config + Validation + Persistence)
    try:
-        builder.include_cards = list(include_cards or [])  # type: ignore[attr-defined]
-        builder.exclude_cards = list(exclude_cards or [])  # type: ignore[attr-defined]
-        builder.enforcement_mode = enforcement_mode  # type: ignore[attr-defined]
-        builder.allow_illegal = allow_illegal  # type: ignore[attr-defined]
-        builder.fuzzy_matching = fuzzy_matching  # type: ignore[attr-defined]
+        builder.include_cards = list(include_cards or [])
+        builder.exclude_cards = list(exclude_cards or [])
+        builder.enforcement_mode = enforcement_mode
+        builder.allow_illegal = allow_illegal
+        builder.fuzzy_matching = fuzzy_matching
    except Exception:
        pass
@@ -336,16 +336,16 @@ def run(
    )

    try:
-        builder.theme_match_mode = theme_resolution.mode  # type: ignore[attr-defined]
-        builder.theme_catalog_version = theme_resolution.catalog_version  # type: ignore[attr-defined]
-        builder.user_theme_requested = list(theme_resolution.requested)  # type: ignore[attr-defined]
-        builder.user_theme_resolved = list(theme_resolution.resolved)  # type: ignore[attr-defined]
-        builder.user_theme_matches = list(theme_resolution.matches)  # type: ignore[attr-defined]
-        builder.user_theme_unresolved = list(theme_resolution.unresolved)  # type: ignore[attr-defined]
-        builder.user_theme_fuzzy_corrections = dict(theme_resolution.fuzzy_corrections)  # type: ignore[attr-defined]
-        builder.user_theme_resolution = theme_resolution  # type: ignore[attr-defined]
+        builder.theme_match_mode = theme_resolution.mode
+        builder.theme_catalog_version = theme_resolution.catalog_version
+        builder.user_theme_requested = list(theme_resolution.requested)
+        builder.user_theme_resolved = list(theme_resolution.resolved)
+        builder.user_theme_matches = list(theme_resolution.matches)
+        builder.user_theme_unresolved = list(theme_resolution.unresolved)
+        builder.user_theme_fuzzy_corrections = dict(theme_resolution.fuzzy_corrections)
+        builder.user_theme_resolution = theme_resolution
        if user_theme_weight is not None:
-            builder.user_theme_weight = float(user_theme_weight)  # type: ignore[attr-defined]
+            builder.user_theme_weight = float(user_theme_weight)
    except Exception:
        pass
@@ -356,7 +356,7 @@ def run(
        ic: Dict[str, int] = {}
        for k, v in ideal_counts.items():
            try:
-                iv = int(v) if v is not None else None  # type: ignore
+                iv = int(v) if v is not None else None
            except Exception:
                continue
            if iv is None:
@@ -365,7 +365,7 @@ def run(
            if k in {"ramp","lands","basic_lands","creatures","removal","wipes","card_advantage","protection"}:
                ic[k] = iv
        if ic:
-            builder.ideal_counts.update(ic)  # type: ignore[attr-defined]
+            builder.ideal_counts.update(ic)
    except Exception:
        pass
    builder.run_initial_setup()
@@ -518,24 +518,24 @@ def _apply_combined_commander_to_builder(builder: DeckBuilder, combined_commande
    """Attach combined commander metadata to the builder for downstream use."""

    try:
-        builder.combined_commander = combined_commander  # type: ignore[attr-defined]
+        builder.combined_commander = combined_commander
    except Exception:
        pass

    try:
-        builder.partner_mode = combined_commander.partner_mode  # type: ignore[attr-defined]
+        builder.partner_mode = combined_commander.partner_mode
    except Exception:
        pass

    try:
-        builder.secondary_commander = combined_commander.secondary_name  # type: ignore[attr-defined]
+        builder.secondary_commander = combined_commander.secondary_name
    except Exception:
        pass

    try:
-        builder.combined_color_identity = combined_commander.color_identity  # type: ignore[attr-defined]
-        builder.combined_theme_tags = combined_commander.theme_tags  # type: ignore[attr-defined]
-        builder.partner_warnings = combined_commander.warnings  # type: ignore[attr-defined]
+        builder.combined_color_identity = combined_commander.color_identity
+        builder.combined_theme_tags = combined_commander.theme_tags
+        builder.partner_warnings = combined_commander.warnings
    except Exception:
        pass
@@ -557,7 +557,7 @@ def _export_outputs(builder: DeckBuilder) -> None:
        # Persist for downstream reuse (e.g., random_entrypoint / reroll flows) so they don't re-export
        if csv_path:
            try:
-                builder.last_csv_path = csv_path  # type: ignore[attr-defined]
+                builder.last_csv_path = csv_path
            except Exception:
                pass
    except Exception:
@@ -572,7 +572,7 @@ def _export_outputs(builder: DeckBuilder) -> None:
        finally:
            if txt_generated:
                try:
-                    builder.last_txt_path = txt_generated  # type: ignore[attr-defined]
+                    builder.last_txt_path = txt_generated
                except Exception:
                    pass
    else:
@@ -582,7 +582,7 @@ def _export_outputs(builder: DeckBuilder) -> None:
        finally:
            if txt_generated:
                try:
-                    builder.last_txt_path = txt_generated  # type: ignore[attr-defined]
+                    builder.last_txt_path = txt_generated
                except Exception:
                    pass
    except Exception:
@@ -1196,7 +1196,7 @@ def _run_random_mode(config: RandomRunConfig) -> int:
            RandomConstraintsImpossibleError,
            RandomThemeNoMatchError,
            build_random_full_deck,
-        )  # type: ignore
+        )
    except Exception as exc:
        print(f"Random mode unavailable: {exc}")
        return 1
@@ -36,7 +36,7 @@ except Exception: # pragma: no cover

 try:
     # Support running as `python code/scripts/build_theme_catalog.py` when 'code' already on path
-    from scripts.extract_themes import (  # type: ignore
+    from scripts.extract_themes import (
         BASE_COLORS,
         collect_theme_tags_from_constants,
         collect_theme_tags_from_tagger_source,
@@ -51,7 +51,7 @@ try:
     )
 except ModuleNotFoundError:
     # Fallback: direct relative import when running within scripts package context
-    from extract_themes import (  # type: ignore
+    from extract_themes import (
         BASE_COLORS,
         collect_theme_tags_from_constants,
         collect_theme_tags_from_tagger_source,
@@ -66,7 +66,7 @@ except ModuleNotFoundError:
     )

 try:
-    from scripts.export_themes_to_yaml import slugify as slugify_theme  # type: ignore
+    from scripts.export_themes_to_yaml import slugify as slugify_theme
 except Exception:
     _SLUG_RE = re.compile(r'[^a-z0-9-]')
@@ -951,7 +951,7 @@ def main(): # pragma: no cover
    if args.schema:
        # Lazy import to avoid circular dependency: replicate minimal schema inline from models file if present
        try:
-            from type_definitions_theme_catalog import ThemeCatalog  # type: ignore
+            from type_definitions_theme_catalog import ThemeCatalog
            import json as _json
            print(_json.dumps(ThemeCatalog.model_json_schema(), indent=2))
            return
@@ -990,8 +990,8 @@ def main(): # pragma: no cover
    # Safeguard: if catalog dir missing, attempt to auto-export Phase A YAML first
    if not CATALOG_DIR.exists():  # pragma: no cover (environmental)
        try:
-            from scripts.export_themes_to_yaml import main as export_main  # type: ignore
-            export_main(['--force'])  # type: ignore[arg-type]
+            from scripts.export_themes_to_yaml import main as export_main
+            export_main(['--force'])
        except Exception as _e:
            print(f"[build_theme_catalog] WARNING: catalog dir missing and auto export failed: {_e}", file=sys.stderr)
    if yaml is None:
@@ -1013,7 +1013,7 @@ def main(): # pragma: no cover
        meta_block = raw.get('metadata_info') if isinstance(raw.get('metadata_info'), dict) else {}
        # Legacy migration: if no metadata_info but legacy provenance present, adopt it
        if not meta_block and isinstance(raw.get('provenance'), dict):
-            meta_block = raw.get('provenance')  # type: ignore
+            meta_block = raw.get('provenance')
            changed = True
        if force or not meta_block.get('last_backfill'):
            meta_block['last_backfill'] = time.strftime('%Y-%m-%dT%H:%M:%S')
@@ -41,7 +41,7 @@ SCRIPT_ROOT = Path(__file__).resolve().parent
 CODE_ROOT = SCRIPT_ROOT.parent
 if str(CODE_ROOT) not in sys.path:
     sys.path.insert(0, str(CODE_ROOT))
-from scripts.extract_themes import derive_synergies_for_tags  # type: ignore
+from scripts.extract_themes import derive_synergies_for_tags

 ROOT = Path(__file__).resolve().parents[2]
 THEME_JSON = ROOT / 'config' / 'themes' / 'theme_list.json'
@@ -18,8 +18,8 @@ ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..'))
 if ROOT not in sys.path:
     sys.path.insert(0, ROOT)

-from code.settings import CSV_DIRECTORY  # type: ignore
-from code.tagging import tag_constants  # type: ignore
+from code.settings import CSV_DIRECTORY
+from code.tagging import tag_constants

 BASE_COLORS = {
     'white': 'W',
@@ -32,7 +32,7 @@ if str(CODE_ROOT) not in sys.path:
     sys.path.insert(0, str(CODE_ROOT))

 try:
-    from code.settings import CSV_DIRECTORY as DEFAULT_CSV_DIRECTORY  # type: ignore
+    from code.settings import CSV_DIRECTORY as DEFAULT_CSV_DIRECTORY
 except Exception:  # pragma: no cover - fallback for adhoc execution
     DEFAULT_CSV_DIRECTORY = "csv_files"
@@ -42,7 +42,7 @@ def _sample_combinations(tags: List[str], iterations: int) -> List[Tuple[str | N

 def _collect_tag_pool(df: pd.DataFrame) -> List[str]:
     tag_pool: set[str] = set()
-    for tags in df.get("_ltags", []):  # type: ignore[assignment]
+    for tags in df.get("_ltags", []):
         if not tags:
             continue
         for token in tags:
@@ -37,7 +37,7 @@ def _refresh_setup() -> None:

 def _refresh_tags() -> None:
     tagger = importlib.import_module("code.tagging.tagger")
-    tagger = importlib.reload(tagger)  # type: ignore[assignment]
+    tagger = importlib.reload(tagger)
     for color in SUPPORTED_COLORS:
         tagger.load_dataframe(color)
@@ -21,7 +21,7 @@ PROJECT_ROOT = Path(__file__).resolve().parents[1]
 if str(PROJECT_ROOT) not in sys.path:
     sys.path.append(str(PROJECT_ROOT))

-from deck_builder.random_entrypoint import (  # type: ignore # noqa: E402
+from deck_builder.random_entrypoint import (  # noqa: E402
     _build_random_theme_pool,
     _ensure_theme_tag_cache,
     _load_commanders_df,
@@ -731,7 +731,7 @@ def main(): # pragma: no cover (script orchestration)
        if cand:
            theme_card_hits[display] = cand
    # Build global duplicate frequency map ONCE (baseline prior to this run) if threshold active
-    if args.common_card_threshold > 0 and 'GLOBAL_CARD_FREQ' not in globals():  # type: ignore
+    if args.common_card_threshold > 0 and 'GLOBAL_CARD_FREQ' not in globals():
        freq: Dict[str, int] = {}
        total_themes = 0
        for fp0 in CATALOG_DIR.glob('*.yml'):
@@ -748,10 +748,10 @@ def main(): # pragma: no cover (script orchestration)
                continue
            seen_local.add(c)
            freq[c] = freq.get(c, 0) + 1
-        globals()['GLOBAL_CARD_FREQ'] = (freq, total_themes)  # type: ignore
+        globals()['GLOBAL_CARD_FREQ'] = (freq, total_themes)
    # Apply duplicate filtering to candidate lists (do NOT mutate existing example_cards)
-    if args.common_card_threshold > 0 and 'GLOBAL_CARD_FREQ' in globals():  # type: ignore
-        freq_map, total_prev = globals()['GLOBAL_CARD_FREQ']  # type: ignore
+    if args.common_card_threshold > 0 and 'GLOBAL_CARD_FREQ' in globals():
+        freq_map, total_prev = globals()['GLOBAL_CARD_FREQ']
        if total_prev > 0:  # avoid div-by-zero
            cutoff = args.common_card_threshold
            def _filter(lst: List[Tuple[float, str, Set[str]]]) -> List[Tuple[float, str, Set[str]]]:
@@ -803,8 +803,8 @@ def main(): # pragma: no cover (script orchestration)
        print(f"[promote] modified {changed_count} themes")
    if args.fill_example_cards:
        print(f"[cards] modified {cards_changed} themes (target {args.cards_target})")
-    if args.print_dup_metrics and 'GLOBAL_CARD_FREQ' in globals():  # type: ignore
-        freq_map, total_prev = globals()['GLOBAL_CARD_FREQ']  # type: ignore
+    if args.print_dup_metrics and 'GLOBAL_CARD_FREQ' in globals():
+        freq_map, total_prev = globals()['GLOBAL_CARD_FREQ']
        if total_prev:
            items = sorted(freq_map.items(), key=lambda x: (-x[1], x[0]))[:30]
            print('[dup-metrics] Top shared example_cards (baseline before this run):')
@@ -31,9 +31,9 @@ CODE_ROOT = ROOT / 'code'
 if str(CODE_ROOT) not in sys.path:
     sys.path.insert(0, str(CODE_ROOT))

-from type_definitions_theme_catalog import ThemeCatalog, ThemeYAMLFile  # type: ignore
-from scripts.extract_themes import load_whitelist_config  # type: ignore
-from scripts.build_theme_catalog import build_catalog  # type: ignore
+from type_definitions_theme_catalog import ThemeCatalog, ThemeYAMLFile
+from scripts.extract_themes import load_whitelist_config
+from scripts.build_theme_catalog import build_catalog

 CATALOG_JSON = ROOT / 'config' / 'themes' / 'theme_list.json'
@@ -89,11 +89,8 @@ COLUMN_ORDER = CARD_COLUMN_ORDER
 TAGGED_COLUMN_ORDER = CARD_COLUMN_ORDER
 REQUIRED_COLUMNS = REQUIRED_CARD_COLUMNS

-MAIN_MENU_ITEMS: List[str] = ['Build A Deck', 'Setup CSV Files', 'Tag CSV Files', 'Quit']
+# MAIN_MENU_ITEMS, SETUP_MENU_ITEMS, CSV_DIRECTORY already defined above (lines 67-70)

-SETUP_MENU_ITEMS: List[str] = ['Initial Setup', 'Regenerate CSV', 'Main Menu']
-
-CSV_DIRECTORY: str = 'csv_files'
 CARD_FILES_DIRECTORY: str = 'card_files'  # Parquet files for consolidated card data

 # ----------------------------------------------------------------------------------
@@ -111,11 +111,7 @@ CARD_FILES_PROCESSED_DIR = os.getenv('CARD_FILES_PROCESSED_DIR', os.path.join(CA
 # Set to '1' or 'true' to enable CSV fallback when Parquet loading fails
 LEGACY_CSV_COMPAT = os.getenv('LEGACY_CSV_COMPAT', '0').lower() in ('1', 'true', 'on', 'enabled')

-# Configuration for handling null/NA values in DataFrame columns
-FILL_NA_COLUMNS: Dict[str, Optional[str]] = {
-    'colorIdentity': 'Colorless',  # Default color identity for cards without one
-    'faceName': None  # Use card's name column value when face name is not available
-}
+# FILL_NA_COLUMNS already defined above (lines 75-78)

 # ----------------------------------------------------------------------------------
 # ALL CARDS CONSOLIDATION FEATURE FLAG
@@ -30,14 +30,14 @@ try:
     import logging_util
 except Exception:
     # Fallback for direct module loading
-    import importlib.util  # type: ignore
+    import importlib.util
     root = Path(__file__).resolve().parents[1]
     lu_path = root / 'logging_util.py'
     spec = importlib.util.spec_from_file_location('logging_util', str(lu_path))
     mod = importlib.util.module_from_spec(spec)  # type: ignore[arg-type]
     assert spec and spec.loader
-    spec.loader.exec_module(mod)  # type: ignore[assignment]
-    logging_util = mod  # type: ignore
+    spec.loader.exec_module(mod)
+    logging_util = mod

 logger = logging_util.logging.getLogger(__name__)
 logger.setLevel(logging_util.LOG_LEVEL)
@@ -173,7 +173,7 @@ def _merge_summary_recorder(color: str):


 def _write_compat_snapshot(df: pd.DataFrame, color: str) -> None:
-    try:  # type: ignore[name-defined]
+    try:
         _DFC_COMPAT_DIR.mkdir(parents=True, exist_ok=True)
         path = _DFC_COMPAT_DIR / f"{color}_cards_unmerged.csv"
         df.to_csv(path, index=False)
@@ -173,7 +173,7 @@ def _merge_summary_recorder(color: str):

 def _write_compat_snapshot(df: pd.DataFrame, color: str) -> None:
     """Write DFC compatibility snapshot (diagnostic output, kept as CSV for now)."""
-    try:  # type: ignore[name-defined]
+    try:
         _DFC_COMPAT_DIR.mkdir(parents=True, exist_ok=True)
         path = _DFC_COMPAT_DIR / f"{color}_cards_unmerged.csv"
         df.to_csv(path, index=False)  # M3: Kept as CSV (diagnostic only, not main data flow)
@@ -11,9 +11,9 @@ def _load_applier():
     root = Path(__file__).resolve().parents[2]
     mod_path = root / 'code' / 'tagging' / 'bracket_policy_applier.py'
     spec = importlib.util.spec_from_file_location('bracket_policy_applier', str(mod_path))
-    mod = importlib.util.module_from_spec(spec)  # type: ignore[arg-type]
+    mod = importlib.util.module_from_spec(spec)
     assert spec and spec.loader
-    spec.loader.exec_module(mod)  # type: ignore[assignment]
+    spec.loader.exec_module(mod)
     return mod
@@ -30,8 +30,8 @@ def test_card_index_color_identity_list_handles_edge_cases(tmp_path, monkeypatch
    csv_path = write_csv(tmp_path)
    monkeypatch.setenv("CARD_INDEX_EXTRA_CSV", str(csv_path))
    # Force rebuild
-    card_index._CARD_INDEX.clear()  # type: ignore
-    card_index._CARD_INDEX_MTIME = None  # type: ignore
+    card_index._CARD_INDEX.clear()
+    card_index._CARD_INDEX_MTIME = None
    card_index.maybe_build_index()

    pool = card_index.get_tag_pool("Blink")
@@ -8,7 +8,7 @@ from urllib.parse import parse_qs, urlparse
 import pytest
 from fastapi.testclient import TestClient

-from code.web.app import app  # type: ignore
+from code.web.app import app
 from code.web.services.commander_catalog_loader import clear_commander_catalog_cache
@@ -5,7 +5,7 @@ from pathlib import Path
 import pytest
 from fastapi.testclient import TestClient

-from code.web.app import app  # type: ignore
+from code.web.app import app
 from code.web.services import telemetry
 from code.web.services.commander_catalog_loader import clear_commander_catalog_cache
@@ -7,7 +7,7 @@ from types import SimpleNamespace
 import pytest
 from fastapi.testclient import TestClient

-from code.web.app import app  # type: ignore
+from code.web.app import app
 from code.web.routes import commanders
 from code.web.services import commander_catalog_loader
 from code.web.services.commander_catalog_loader import clear_commander_catalog_cache, load_commander_catalog
@@ -24,7 +24,7 @@ def load_app_with_env(**env: str) -> types.ModuleType:
        os.environ.pop(key, None)
    for k, v in env.items():
        os.environ[k] = v
-    import code.web.app as app_module  # type: ignore
+    import code.web.app as app_module
    importlib.reload(app_module)
    return app_module
@@ -50,7 +50,7 @@ def _load_catalog() -> Dict[str, Any]:
 def test_deterministic_build_under_seed():
     # Import build after setting seed env
     os.environ['EDITORIAL_SEED'] = '999'
-    from scripts.build_theme_catalog import build_catalog  # type: ignore
+    from scripts.build_theme_catalog import build_catalog
     first = build_catalog(limit=0, verbose=False)
     second = build_catalog(limit=0, verbose=False)
     # Drop volatile metadata_info/timestamp fields before comparison
@@ -106,7 +106,7 @@ def test_metadata_info_block_coverage():


 def test_synergy_commanders_exclusion_of_examples():
-    import yaml  # type: ignore
+    import yaml
     pattern = re.compile(r" - Synergy \(.*\)$")
     violations: List[str] = []
     for p in CATALOG_DIR.glob('*.yml'):
@@ -128,7 +128,7 @@ def test_synergy_commanders_exclusion_of_examples():


 def test_mapping_trigger_specialization_guard():
-    import yaml  # type: ignore
+    import yaml
     assert MAPPING.exists(), "description_mapping.yml missing"
     mapping_yaml = yaml.safe_load(MAPPING.read_text(encoding='utf-8')) or []
     triggers: Set[str] = set()
@@ -20,7 +20,7 @@ def load_app_with_env(**env: str) -> types.ModuleType:
        os.environ.pop(key, None)
    for k, v in env.items():
        os.environ[k] = v
-    import code.web.app as app_module  # type: ignore
+    import code.web.app as app_module
    importlib.reload(app_module)
    return app_module
@@ -14,7 +14,7 @@ class DummyBuilder(ReportingMixin):
         self.card_library = card_library
         self.color_identity = colors
         self.output_lines: List[str] = []
-        self.output_func = self.output_lines.append  # type: ignore[assignment]
+        self.output_func = self.output_lines.append
         self._full_cards_df = None
         self._combined_cards_df = None
         self.include_exclude_diagnostics = None
@@ -20,7 +20,7 @@ def _stub_modal_matrix(builder: DeckBuilder) -> None:
        "Forest": {"G": 1},
    }

-    builder._compute_color_source_matrix = MethodType(fake_matrix, builder)  # type: ignore[attr-defined]
+    builder._compute_color_source_matrix = MethodType(fake_matrix, builder)


 def test_modal_dfc_swaps_basic_when_enabled():
@@ -18,7 +18,7 @@ def test_multicopy_clamp_trims_current_stage_additions_only():
    # Preseed 95 cards in the library
    b.card_library = {"Filler": {"Count": 95, "Role": "Test", "SubRole": "", "AddedBy": "Test"}}
    # Set a multi-copy selection that would exceed 100 by 15
-    b._web_multi_copy = {  # type: ignore[attr-defined]
+    b._web_multi_copy = {
        "id": "persistent_petitioners",
        "name": "Persistent Petitioners",
        "count": 20,
@@ -23,7 +23,7 @@ def test_petitioners_clamp_to_100_and_reduce_creature_slots():
        "card_advantage": 8, "protection": 4,
    }
    # Thread multi-copy selection for Petitioners as a creature archetype
-    b._web_multi_copy = {  # type: ignore[attr-defined]
+    b._web_multi_copy = {
        "id": "persistent_petitioners",
        "name": "Persistent Petitioners",
        "count": 40,  # intentionally large to trigger clamp/adjustments
@@ -17,7 +17,7 @@ def _minimal_ctx(selection: dict):

    b = DeckBuilder(output_func=out, input_func=lambda *_: "", headless=True)
    # Thread selection and ensure empty library
-    b._web_multi_copy = selection  # type: ignore[attr-defined]
+    b._web_multi_copy = selection
    b.card_library = {}

    ctx = {
@@ -1,7 +1,7 @@
 import importlib
 import pytest
 try:
-    from starlette.testclient import TestClient  # type: ignore
+    from starlette.testclient import TestClient
 except Exception:  # pragma: no cover - optional dep in CI
     TestClient = None  # type: ignore
@@ -128,7 +128,7 @@ def _make_request(path: str = "/api/partner/suggestions", query_string: str = ""
        "client": ("203.0.113.5", 52345),
        "server": ("testserver", 80),
    }
-    request = Request(scope, receive=_receive)  # type: ignore[arg-type]
+    request = Request(scope, receive=_receive)
    request.state.request_id = "req-telemetry"
    return request
@@ -197,21 +197,21 @@ def test_load_dataset_refresh_retries_after_prior_failure(tmp_path: Path, monkey
    from code.web.services import orchestrator as orchestrator_service

    original_default = partner_service.DEFAULT_DATASET_PATH
-    original_path = partner_service._DATASET_PATH  # type: ignore[attr-defined]
-    original_cache = partner_service._DATASET_CACHE  # type: ignore[attr-defined]
-    original_attempted = partner_service._DATASET_REFRESH_ATTEMPTED  # type: ignore[attr-defined]
+    original_path = partner_service._DATASET_PATH
+    original_cache = partner_service._DATASET_CACHE
+    original_attempted = partner_service._DATASET_REFRESH_ATTEMPTED

    partner_service.DEFAULT_DATASET_PATH = dataset_path
-    partner_service._DATASET_PATH = dataset_path  # type: ignore[attr-defined]
-    partner_service._DATASET_CACHE = None  # type: ignore[attr-defined]
-    partner_service._DATASET_REFRESH_ATTEMPTED = True  # type: ignore[attr-defined]
+    partner_service._DATASET_PATH = dataset_path
+    partner_service._DATASET_CACHE = None
+    partner_service._DATASET_REFRESH_ATTEMPTED = True

    calls = {"count": 0}

    payload_path = tmp_path / "seed_dataset.json"
    _write_dataset(payload_path)

-    def seeded_refresh(out_func=None, *, force=False, root=None):  # type: ignore[override]
+    def seeded_refresh(out_func=None, *, force=False, root=None):
        calls["count"] += 1
        dataset_path.write_text(payload_path.read_text(encoding="utf-8"), encoding="utf-8")
@@ -227,9 +227,9 @@ def test_load_dataset_refresh_retries_after_prior_failure(tmp_path: Path, monkey
        assert calls["count"] == 1
    finally:
        partner_service.DEFAULT_DATASET_PATH = original_default
-        partner_service._DATASET_PATH = original_path  # type: ignore[attr-defined]
-        partner_service._DATASET_CACHE = original_cache  # type: ignore[attr-defined]
-        partner_service._DATASET_REFRESH_ATTEMPTED = original_attempted  # type: ignore[attr-defined]
+        partner_service._DATASET_PATH = original_path
+        partner_service._DATASET_CACHE = original_cache
+        partner_service._DATASET_REFRESH_ATTEMPTED = original_attempted
        try:
            dataset_path.unlink()
        except FileNotFoundError:
@@ -33,7 +33,7 @@ def _invoke_helper(
 ) -> list[tuple[list[str], str]]:
     calls: list[tuple[list[str], str]] = []

-    def _fake_run(cmd, check=False, cwd=None):  # type: ignore[no-untyped-def]
+    def _fake_run(cmd, check=False, cwd=None):
         calls.append((list(cmd), cwd))
         class _Completed:
             returncode = 0
@@ -10,7 +10,7 @@ fastapi = pytest.importorskip("fastapi")
 def load_app_with_env(**env: str) -> types.ModuleType:
     for k,v in env.items():
         os.environ[k] = v
-    import code.web.app as app_module  # type: ignore
+    import code.web.app as app_module
     importlib.reload(app_module)
     return app_module
@@ -1,7 +1,7 @@
 import json
 from fastapi.testclient import TestClient

-from code.web.app import app  # type: ignore
+from code.web.app import app


 def test_preview_includes_curated_examples_regression():
@@ -1,8 +1,8 @@
 import os

-from code.web.services.theme_preview import get_theme_preview, bust_preview_cache  # type: ignore
-from code.web.services import preview_cache as pc  # type: ignore
-from code.web.services.preview_metrics import preview_metrics  # type: ignore
+from code.web.services.theme_preview import get_theme_preview, bust_preview_cache
+from code.web.services import preview_cache as pc
+from code.web.services.preview_metrics import preview_metrics


 def _prime(slug: str, limit: int = 12, hits: int = 0, *, colors=None):
@@ -89,7 +89,7 @@ def test_env_weight_override(monkeypatch):
    bust_preview_cache()
    # Clear module-level caches for weights
    if hasattr(pc, '_EVICT_WEIGHTS_CACHE'):
-        pc._EVICT_WEIGHTS_CACHE = None  # type: ignore
+        pc._EVICT_WEIGHTS_CACHE = None
    # Create two entries: one older with many hits, one fresh with none.
    _prime('Blink', limit=6, hits=6, colors=None)  # older hot entry
    old_key = next(iter(pc.PREVIEW_CACHE.keys()))
@@ -1,6 +1,6 @@
 import os
-from code.web.services.theme_preview import get_theme_preview, bust_preview_cache  # type: ignore
-from code.web.services import preview_cache as pc  # type: ignore
+from code.web.services.theme_preview import get_theme_preview, bust_preview_cache
+from code.web.services import preview_cache as pc


 def test_basic_low_score_eviction(monkeypatch):
@@ -17,7 +17,7 @@ def test_basic_low_score_eviction(monkeypatch):
        get_theme_preview('Blink', limit=6, colors=c)
    # Cache limit 5, inserted 6 distinct -> eviction should have occurred
    assert len(pc.PREVIEW_CACHE) <= 5
-    from code.web.services.preview_metrics import preview_metrics  # type: ignore
+    from code.web.services.preview_metrics import preview_metrics
    m = preview_metrics()
    assert m['preview_cache_evictions'] >= 1, 'Expected at least one eviction'
    assert m['preview_cache_evictions_by_reason'].get('low_score', 0) >= 1
@@ -1,5 +1,5 @@
 from fastapi.testclient import TestClient
-from code.web.app import app  # type: ignore
+from code.web.app import app


 def test_minimal_variant_hides_controls_and_headers():
@@ -8,7 +8,7 @@ pytestmark = pytest.mark.skip(reason="M4: preview_perf_benchmark module removed
 def test_fetch_all_theme_slugs_retries(monkeypatch):
     calls = {"count": 0}

-    def fake_fetch(url):  # type: ignore[override]
+    def fake_fetch(url):
         calls["count"] += 1
         if calls["count"] == 1:
             raise RuntimeError("transient 500")
@@ -27,7 +27,7 @@ def test_fetch_all_theme_slugs_retries(monkeypatch):
 def test_fetch_all_theme_slugs_page_level_retry(monkeypatch):
     calls = {"count": 0}

-    def fake_fetch_with_retry(url, attempts=3, delay=0.6):  # type: ignore[override]
+    def fake_fetch_with_retry(url, attempts=3, delay=0.6):
         calls["count"] += 1
         if calls["count"] < 3:
             raise RuntimeError("service warming up")
@@ -1,5 +1,5 @@
 from fastapi.testclient import TestClient
-from code.web.app import app  # type: ignore
+from code.web.app import app


 def test_preview_fragment_suppress_curated_removes_examples():
@@ -3,16 +3,16 @@ from code.web.services import preview_cache as pc

 def _force_interval_elapsed():
     # Ensure adaptation interval guard passes
-    if pc._LAST_ADAPT_AT is not None:  # type: ignore[attr-defined]
-        pc._LAST_ADAPT_AT -= (pc._ADAPT_INTERVAL_S + 1)  # type: ignore[attr-defined]
+    if pc._LAST_ADAPT_AT is not None:
+        pc._LAST_ADAPT_AT -= (pc._ADAPT_INTERVAL_S + 1)


 def test_ttl_adapts_down_and_up(capsys):
     # Enable adaptation regardless of env
-    pc._ADAPTATION_ENABLED = True  # type: ignore[attr-defined]
-    pc.TTL_SECONDS = pc._TTL_BASE  # type: ignore[attr-defined]
-    pc._RECENT_HITS.clear()  # type: ignore[attr-defined]
-    pc._LAST_ADAPT_AT = None  # type: ignore[attr-defined]
+    pc._ADAPTATION_ENABLED = True
+    pc.TTL_SECONDS = pc._TTL_BASE
+    pc._RECENT_HITS.clear()
+    pc._LAST_ADAPT_AT = None

     # Low hit ratio pattern (~0.1)
     for _ in range(72):
@@ -23,11 +23,11 @@ def test_ttl_adapts_down_and_up(capsys):
    out1 = capsys.readouterr().out
    assert "theme_preview_ttl_adapt" in out1, "expected adaptation log for low hit ratio"
    ttl_after_down = pc.TTL_SECONDS
-    assert ttl_after_down <= pc._TTL_BASE  # type: ignore[attr-defined]
+    assert ttl_after_down <= pc._TTL_BASE

    # Force interval elapsed & high hit ratio pattern (~0.9)
    _force_interval_elapsed()
-    pc._RECENT_HITS.clear()  # type: ignore[attr-defined]
+    pc._RECENT_HITS.clear()
    for _ in range(72):
        pc.record_request_hit(True)
    for _ in range(8):
@@ -19,17 +19,17 @@ def _client_with_flags(window_s: int = 2, limit_random: int = 2, limit_build: in

    # Force fresh import so RATE_LIMIT_* constants reflect env
    sys.modules.pop('code.web.app', None)
-    from code.web import app as app_module  # type: ignore
+    from code.web import app as app_module
    # Force override constants for deterministic test
    try:
-        app_module.RATE_LIMIT_ENABLED = True  # type: ignore[attr-defined]
-        app_module.RATE_LIMIT_WINDOW_S = window_s  # type: ignore[attr-defined]
-        app_module.RATE_LIMIT_RANDOM = limit_random  # type: ignore[attr-defined]
-        app_module.RATE_LIMIT_BUILD = limit_build  # type: ignore[attr-defined]
-        app_module.RATE_LIMIT_SUGGEST = limit_suggest  # type: ignore[attr-defined]
+        app_module.RATE_LIMIT_ENABLED = True
+        app_module.RATE_LIMIT_WINDOW_S = window_s
+        app_module.RATE_LIMIT_RANDOM = limit_random
+        app_module.RATE_LIMIT_BUILD = limit_build
+        app_module.RATE_LIMIT_SUGGEST = limit_suggest
        # Reset in-memory counters
        if hasattr(app_module, '_RL_COUNTS'):
-            app_module._RL_COUNTS.clear()  # type: ignore[attr-defined]
+            app_module._RL_COUNTS.clear()
    except Exception:
        pass
    return TestClient(app_module.app)
@@ -3,8 +3,8 @@ from pathlib import Path

 from fastapi.testclient import TestClient

-from code.web import app as web_app  # type: ignore
-from code.web.app import app  # type: ignore
+from code.web import app as web_app
+from code.web.app import app

 # Ensure project root on sys.path for absolute imports
 ROOT = Path(__file__).resolve().parents[2]
@@ -9,17 +9,17 @@ def setup_module(module): # ensure deterministic env weights
 def test_rarity_diminishing():
     # Monkeypatch internal index
-    card_index._CARD_INDEX.clear()  # type: ignore
+    card_index._CARD_INDEX.clear()
     theme = "Test Theme"
-    card_index._CARD_INDEX[theme] = [  # type: ignore
+    card_index._CARD_INDEX[theme] = [
         {"name": "Mythic One", "tags": [theme], "color_identity": "G", "mana_cost": "G", "rarity": "mythic"},
         {"name": "Mythic Two", "tags": [theme], "color_identity": "G", "mana_cost": "G", "rarity": "mythic"},
     ]
     def no_build():
         return None
-    sampling.maybe_build_index = no_build  # type: ignore
+    sampling.maybe_build_index = no_build
     cards = sampling.sample_real_cards_for_theme(theme, 2, None, synergies=[theme], commander=None)
-    rarity_weights = [r for c in cards for r in c["reasons"] if r.startswith("rarity_weight_calibrated")]  # type: ignore
+    rarity_weights = [r for c in cards for r in c["reasons"] if r.startswith("rarity_weight_calibrated")]
     assert len(rarity_weights) >= 2
     v1 = float(rarity_weights[0].split(":")[-1])
     v2 = float(rarity_weights[1].split(":")[-1])
@@ -40,15 +40,15 @@ def test_commander_overlap_monotonic_diminishing():


 def test_splash_off_color_penalty_applied():
-    card_index._CARD_INDEX.clear()  # type: ignore
+    card_index._CARD_INDEX.clear()
     theme = "Splash Theme"
     # Commander W U B R (4 colors)
     commander = {"name": "CommanderTest", "tags": [theme], "color_identity": "WUBR", "mana_cost": "", "rarity": "mythic"}
     # Card with single off-color G (W U B R G)
     splash_card = {"name": "CardSplash", "tags": [theme], "color_identity": "WUBRG", "mana_cost": "G", "rarity": "rare"}
-    card_index._CARD_INDEX[theme] = [commander, splash_card]  # type: ignore
-    sampling.maybe_build_index = lambda: None  # type: ignore
+    card_index._CARD_INDEX[theme] = [commander, splash_card]
+    sampling.maybe_build_index = lambda: None
     cards = sampling.sample_real_cards_for_theme(theme, 2, None, synergies=[theme], commander="CommanderTest")
     splash = next((c for c in cards if c["name"] == "CardSplash"), None)
     assert splash is not None
-    assert any(r.startswith("splash_off_color_penalty") for r in splash["reasons"])  # type: ignore
+    assert any(r.startswith("splash_off_color_penalty") for r in splash["reasons"])
@@ -1,5 +1,5 @@
 import re
-from code.web.services.theme_preview import get_theme_preview  # type: ignore
+from code.web.services.theme_preview import get_theme_preview

 # We can't easily execute the JS normalizeCardName in Python, but we can ensure
 # server-delivered sample names that include appended synergy annotations are not
|
|||
def load_app_with_env(**env: str) -> types.ModuleType:
|
||||
for k, v in env.items():
|
||||
os.environ[k] = v
|
||||
import code.web.app as app_module # type: ignore
|
||||
import code.web.app as app_module
|
||||
importlib.reload(app_module)
|
||||
return app_module
|
||||
|
||||
|
|
|
|||
|
|
@@ -2,7 +2,7 @@ import sys
 from pathlib import Path
 import pytest
 from fastapi.testclient import TestClient
-from code.web.app import app  # type: ignore
+from code.web.app import app

 # Ensure project root on sys.path for absolute imports
 ROOT = Path(__file__).resolve().parents[2]
@@ -146,7 +146,7 @@ def test_generate_theme_catalog_basic(tmp_path: Path, fixed_now: datetime) -> No
    assert all(row['last_generated_at'] == result.generated_at for row in rows)
    assert all(row['version'] == result.version for row in rows)

-    expected_hash = new_catalog._compute_version_hash([row['theme'] for row in rows])  # type: ignore[attr-defined]
+    expected_hash = new_catalog._compute_version_hash([row['theme'] for row in rows])
    assert result.version == expected_hash
@@ -4,7 +4,7 @@ import os
 import importlib
 from pathlib import Path
 from starlette.testclient import TestClient
-from code.type_definitions_theme_catalog import ThemeCatalog  # type: ignore
+from code.type_definitions_theme_catalog import ThemeCatalog

 CATALOG_PATH = Path('config/themes/theme_list.json')
@@ -8,7 +8,7 @@ def test_theme_list_json_validates_against_pydantic_and_fast_path():
    raw = json.loads(p.read_text(encoding='utf-8'))

    # Pydantic validation
-    from code.type_definitions_theme_catalog import ThemeCatalog  # type: ignore
+    from code.type_definitions_theme_catalog import ThemeCatalog
    catalog = ThemeCatalog(**raw)
    assert isinstance(catalog.themes, list) and len(catalog.themes) > 0
    # Basic fields exist on entries
@@ -36,7 +36,7 @@ from fastapi.testclient import TestClient


 def _get_app():  # local import to avoid heavy import cost if file unused
-    from code.web.app import app  # type: ignore
+    from code.web.app import app
     return app
@@ -115,13 +115,13 @@ def test_preview_cache_hit_timing(monkeypatch, client):
    r1 = client.get(f"/themes/fragment/preview/{theme_id}?limit=12")
    assert r1.status_code == 200
    # Monkeypatch theme_preview._now to freeze time so second call counts as hit
-    import code.web.services.theme_preview as tp  # type: ignore
+    import code.web.services.theme_preview as tp
    orig_now = tp._now
    monkeypatch.setattr(tp, "_now", lambda: orig_now())
    r2 = client.get(f"/themes/fragment/preview/{theme_id}?limit=12")
    assert r2.status_code == 200
    # Deterministic service-level verification: second direct function call should short-circuit via cache
-    import code.web.services.theme_preview as tp  # type: ignore
+    import code.web.services.theme_preview as tp
    # Snapshot counters
    pre_hits = getattr(tp, "_PREVIEW_CACHE_HITS", 0)
    first_payload = tp.get_theme_preview(theme_id, limit=12)
@@ -16,7 +16,7 @@ def _new_client(prewarm: bool = False) -> TestClient:
    # Remove existing module (if any) so lifespan runs again
    if 'code.web.app' in list(importlib.sys.modules.keys()):
        importlib.sys.modules.pop('code.web.app')
-    from code.web.app import app  # type: ignore
+    from code.web.app import app
    return TestClient(app)
@@ -2,8 +2,8 @@ from __future__ import annotations

 import pytest

-from code.web.services.theme_preview import get_theme_preview  # type: ignore
-from code.web.services.theme_catalog_loader import load_index, slugify, project_detail  # type: ignore
+from code.web.services.theme_preview import get_theme_preview
+from code.web.services.theme_catalog_loader import load_index, slugify, project_detail


 @pytest.mark.parametrize("limit", [8, 12])
@@ -1,7 +1,7 @@
 import os
 import time
 import json
-from code.web.services.theme_preview import get_theme_preview, preview_metrics, bust_preview_cache  # type: ignore
+from code.web.services.theme_preview import get_theme_preview, preview_metrics, bust_preview_cache


 def test_colors_filter_constraint_green_subset():
@@ -47,10 +47,10 @@ class DummySpellBuilder(SpellAdditionMixin):
     def rng(self) -> DummyRNG:
         return self._rng

-    def get_theme_context(self) -> ThemeContext:  # type: ignore[override]
+    def get_theme_context(self) -> ThemeContext:
         return self._theme_context

-    def add_card(self, name: str, **kwargs: Any) -> None:  # type: ignore[override]
+    def add_card(self, name: str, **kwargs: Any) -> None:
         self.card_library[name] = {"Count": kwargs.get("count", 1)}
         self.added_cards.append(name)
@@ -20,7 +20,7 @@ def _fresh_client() -> TestClient:
    from code.web.services.commander_catalog_loader import clear_commander_catalog_cache

    clear_commander_catalog_cache()
-    from code.web.app import app  # type: ignore
+    from code.web.app import app

    client = TestClient(app)
    from code.web.services import tasks
@@ -8,7 +8,7 @@ from pathlib import Path
 def get_unused_ignores():
     """Run mypy and extract all unused-ignore errors."""
     result = subprocess.run(
-        ['python', '-m', 'mypy', 'code/web/', '--show-error-codes'],
+        ['python', '-m', 'mypy', 'code', '--show-error-codes'],
         capture_output=True,
         text=True,
         cwd=Path(__file__).parent
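The only change in this last hunk widens the mypy target from `code/web/` to `code`, so the unused-ignore scan covers the whole package rather than only the web layer. A hedged sketch of how such a widened run might be consumed; the invocation mirrors the hunk above, but the filtering and summary line are illustrative only and not taken from the repository.

```python
# Illustrative check: count unused-ignore diagnostics across the whole 'code' package.
import subprocess

cmd = ["python", "-m", "mypy", "code", "--show-error-codes"]  # was: "code/web/"
completed = subprocess.run(cmd, capture_output=True, text=True)
# With --show-error-codes, unused suppressions are reported with the [unused-ignore] code.
unused = [line for line in completed.stdout.splitlines() if "unused-ignore" in line]
print(f"{len(unused)} unused-ignore diagnostics found under 'code'")
```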