mirror of
https://github.com/mwisnowski/mtg_python_deckbuilder.git
synced 2025-12-16 23:50:12 +01:00
feat: implement batch build and comparison
This commit is contained in:
parent
1d95c5cbd0
commit
f1e21873e7
20 changed files with 2691 additions and 6 deletions
|
|
@ -14,6 +14,7 @@ from ..app import (
|
|||
ENABLE_PARTNER_MECHANICS,
|
||||
ENABLE_PARTNER_SUGGESTIONS,
|
||||
WEB_IDEALS_UI,
|
||||
ENABLE_BATCH_BUILD,
|
||||
)
|
||||
from ..services.build_utils import (
|
||||
step5_base_ctx,
|
||||
|
|
@ -1357,6 +1358,7 @@ async def build_new_modal(request: Request) -> HTMLResponse:
|
|||
"allow_must_haves": ALLOW_MUST_HAVES, # Add feature flag
|
||||
"show_must_have_buttons": SHOW_MUST_HAVE_BUTTONS,
|
||||
"enable_custom_themes": ENABLE_CUSTOM_THEMES,
|
||||
"enable_batch_build": ENABLE_BATCH_BUILD,
|
||||
"ideals_ui_mode": WEB_IDEALS_UI, # 'input' or 'slider'
|
||||
"form": {
|
||||
"prefer_combos": bool(sess.get("prefer_combos")),
|
||||
|
|
@ -1952,6 +1954,8 @@ async def build_new_submit(
|
|||
enforcement_mode: str = Form("warn"),
|
||||
allow_illegal: bool = Form(False),
|
||||
fuzzy_matching: bool = Form(True),
|
||||
# Build count for multi-build
|
||||
build_count: int = Form(1),
|
||||
# Quick Build flag
|
||||
quick_build: str | None = Form(None),
|
||||
) -> HTMLResponse:
|
||||
|
|
@ -2025,6 +2029,7 @@ async def build_new_submit(
|
|||
"allow_must_haves": ALLOW_MUST_HAVES,
|
||||
"show_must_have_buttons": SHOW_MUST_HAVE_BUTTONS,
|
||||
"enable_custom_themes": ENABLE_CUSTOM_THEMES,
|
||||
"enable_batch_build": ENABLE_BATCH_BUILD,
|
||||
"form": _form_state(suggested),
|
||||
"tag_slot_html": None,
|
||||
}
|
||||
|
|
@ -2049,6 +2054,7 @@ async def build_new_submit(
|
|||
"allow_must_haves": ALLOW_MUST_HAVES, # Add feature flag
|
||||
"show_must_have_buttons": SHOW_MUST_HAVE_BUTTONS,
|
||||
"enable_custom_themes": ENABLE_CUSTOM_THEMES,
|
||||
"enable_batch_build": ENABLE_BATCH_BUILD,
|
||||
"form": _form_state(commander),
|
||||
"tag_slot_html": None,
|
||||
}
|
||||
|
|
@ -2153,6 +2159,7 @@ async def build_new_submit(
|
|||
"allow_must_haves": ALLOW_MUST_HAVES,
|
||||
"show_must_have_buttons": SHOW_MUST_HAVE_BUTTONS,
|
||||
"enable_custom_themes": ENABLE_CUSTOM_THEMES,
|
||||
"enable_batch_build": ENABLE_BATCH_BUILD,
|
||||
"form": _form_state(primary_commander_name),
|
||||
"tag_slot_html": tag_slot_html,
|
||||
}
|
||||
|
|
@ -2291,6 +2298,7 @@ async def build_new_submit(
|
|||
"allow_must_haves": ALLOW_MUST_HAVES,
|
||||
"show_must_have_buttons": SHOW_MUST_HAVE_BUTTONS,
|
||||
"enable_custom_themes": ENABLE_CUSTOM_THEMES,
|
||||
"enable_batch_build": ENABLE_BATCH_BUILD,
|
||||
"form": _form_state(sess.get("commander", "")),
|
||||
"tag_slot_html": None,
|
||||
}
|
||||
|
|
@ -2479,7 +2487,101 @@ async def build_new_submit(
|
|||
# Centralized staged context creation
|
||||
sess["build_ctx"] = start_ctx_from_session(sess)
|
||||
|
||||
# Check if Quick Build was requested
|
||||
# Validate and normalize build_count
|
||||
try:
|
||||
build_count = max(1, min(10, int(build_count)))
|
||||
except Exception:
|
||||
build_count = 1
|
||||
|
||||
# Check if this is a multi-build request (build_count > 1)
|
||||
if build_count > 1:
|
||||
# Multi-Build: Queue parallel builds and return batch progress page
|
||||
from ..services.multi_build_orchestrator import queue_builds, run_batch_async
|
||||
|
||||
# Create config dict from session for batch builds
|
||||
batch_config = {
|
||||
"commander": sess.get("commander"),
|
||||
"tags": sess.get("tags", []),
|
||||
"tag_mode": sess.get("tag_mode", "AND"),
|
||||
"bracket": sess.get("bracket", 3),
|
||||
"ideals": sess.get("ideals", {}),
|
||||
"prefer_combos": sess.get("prefer_combos", False),
|
||||
"combo_target_count": sess.get("combo_target_count"),
|
||||
"combo_balance": sess.get("combo_balance"),
|
||||
"multi_copy": sess.get("multi_copy"),
|
||||
"use_owned_only": sess.get("use_owned_only", False),
|
||||
"prefer_owned": sess.get("prefer_owned", False),
|
||||
"swap_mdfc_basics": sess.get("swap_mdfc_basics", False),
|
||||
"include_cards": sess.get("include_cards", []),
|
||||
"exclude_cards": sess.get("exclude_cards", []),
|
||||
"enforcement_mode": sess.get("enforcement_mode", "warn"),
|
||||
"allow_illegal": sess.get("allow_illegal", False),
|
||||
"fuzzy_matching": sess.get("fuzzy_matching", True),
|
||||
"locks": list(sess.get("locks", [])),
|
||||
}
|
||||
|
||||
# Handle partner mechanics if present
|
||||
if sess.get("partner_enabled"):
|
||||
batch_config["partner_enabled"] = True
|
||||
if sess.get("secondary_commander"):
|
||||
batch_config["secondary_commander"] = sess["secondary_commander"]
|
||||
if sess.get("background"):
|
||||
batch_config["background"] = sess["background"]
|
||||
if sess.get("partner_mode"):
|
||||
batch_config["partner_mode"] = sess["partner_mode"]
|
||||
if sess.get("combined_commander"):
|
||||
batch_config["combined_commander"] = sess["combined_commander"]
|
||||
|
||||
# Add color identity for synergy builder (needed for basic land allocation)
|
||||
try:
|
||||
tmp_builder = DeckBuilder(output_func=lambda *_: None, input_func=lambda *_: "", headless=True)
|
||||
|
||||
# Handle partner mechanics if present
|
||||
if sess.get("partner_enabled") and sess.get("secondary_commander"):
|
||||
from deck_builder.partner_selection import apply_partner_inputs
|
||||
combined_obj = apply_partner_inputs(
|
||||
tmp_builder,
|
||||
primary_name=sess["commander"],
|
||||
secondary_name=sess.get("secondary_commander"),
|
||||
background_name=sess.get("background"),
|
||||
feature_enabled=True,
|
||||
)
|
||||
if combined_obj and hasattr(combined_obj, "color_identity"):
|
||||
batch_config["colors"] = list(combined_obj.color_identity)
|
||||
else:
|
||||
# Single commander
|
||||
df = tmp_builder.load_commander_data()
|
||||
row = df[df["name"] == sess["commander"]]
|
||||
if not row.empty:
|
||||
# Get colorIdentity from dataframe (it's a string like "RG" or "G")
|
||||
color_str = row.iloc[0].get("colorIdentity", "")
|
||||
if color_str:
|
||||
batch_config["colors"] = list(color_str) # Convert "RG" to ['R', 'G']
|
||||
except Exception as e:
|
||||
import logging
|
||||
logging.getLogger(__name__).warning(f"[Batch] Failed to load color identity for {sess.get('commander')}: {e}")
|
||||
pass # Not critical, synergy builder will skip basics if missing
|
||||
|
||||
# Queue the batch
|
||||
batch_id = queue_builds(batch_config, build_count, sid)
|
||||
|
||||
# Start background task for parallel builds
|
||||
background_tasks.add_task(run_batch_async, batch_id, sid)
|
||||
|
||||
# Return batch progress template
|
||||
progress_ctx = {
|
||||
"request": request,
|
||||
"batch_id": batch_id,
|
||||
"build_count": build_count,
|
||||
"completed": 0,
|
||||
"current_build": 1,
|
||||
"status": "Starting builds..."
|
||||
}
|
||||
resp = templates.TemplateResponse("build/_batch_progress.html", progress_ctx)
|
||||
resp.set_cookie("sid", sid, httponly=True, samesite="lax")
|
||||
return resp
|
||||
|
||||
# Check if Quick Build was requested (single build only)
|
||||
is_quick_build = (quick_build or "").strip() == "1"
|
||||
|
||||
if is_quick_build:
|
||||
|
|
@ -3785,6 +3887,68 @@ def quick_build_progress(request: Request):
|
|||
response.set_cookie("sid", sid, httponly=True, samesite="lax")
|
||||
return response
|
||||
|
||||
|
||||
@router.get("/batch-progress")
|
||||
def batch_build_progress(request: Request, batch_id: str = Query(...)):
|
||||
"""Poll endpoint for Batch Build progress. Returns either progress indicator or redirect to comparison."""
|
||||
import logging
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
sid = request.cookies.get("sid") or new_sid()
|
||||
sess = get_session(sid)
|
||||
|
||||
from ..services.build_cache import BuildCache
|
||||
|
||||
batch_status = BuildCache.get_batch_status(sess, batch_id)
|
||||
logger.info(f"[Batch Progress Poll] batch_id={batch_id}, status={batch_status}")
|
||||
|
||||
if not batch_status:
|
||||
return HTMLResponse('<div class="error">Batch not found. Please refresh.</div>')
|
||||
|
||||
if batch_status["status"] == "completed":
|
||||
# All builds complete - redirect to comparison page
|
||||
response = HTMLResponse(f'<script>window.location.href = "/compare/{batch_id}";</script>')
|
||||
response.set_cookie("sid", sid, httponly=True, samesite="lax")
|
||||
return response
|
||||
|
||||
# Get config to determine color count for time estimate
|
||||
config = BuildCache.get_batch_config(sess, batch_id)
|
||||
commander_name = config.get("commander", "") if config else ""
|
||||
|
||||
# Estimate time based on color count (from testing data)
|
||||
time_estimate = "1-3 minutes"
|
||||
if commander_name and config:
|
||||
# Try to get commander's color identity
|
||||
try:
|
||||
from ..services import orchestrator as orch
|
||||
cmd_data = orch.load_commander(commander_name)
|
||||
if cmd_data and "colorIdentity" in cmd_data:
|
||||
color_count = len(cmd_data.get("colorIdentity", []))
|
||||
if color_count <= 2:
|
||||
time_estimate = "1-3 minutes"
|
||||
elif color_count == 3:
|
||||
time_estimate = "2-4 minutes"
|
||||
else: # 4-5 colors
|
||||
time_estimate = "3-5 minutes"
|
||||
except Exception:
|
||||
pass # Default to 1-3 if we can't determine
|
||||
|
||||
# Build still running - return progress content partial only
|
||||
ctx = {
|
||||
"request": request,
|
||||
"batch_id": batch_id,
|
||||
"build_count": batch_status["count"],
|
||||
"completed": batch_status["completed"],
|
||||
"progress_pct": batch_status["progress_pct"],
|
||||
"status": f"Building deck {batch_status['completed'] + 1} of {batch_status['count']}..." if batch_status['completed'] < batch_status['count'] else "Finalizing...",
|
||||
"has_errors": batch_status["has_errors"],
|
||||
"error_count": batch_status["error_count"],
|
||||
"time_estimate": time_estimate
|
||||
}
|
||||
response = templates.TemplateResponse("build/_batch_progress_content.html", ctx)
|
||||
response.set_cookie("sid", sid, httponly=True, samesite="lax")
|
||||
return response
|
||||
|
||||
# --- Phase 8: Lock/Replace/Compare/Permalink minimal API ---
|
||||
|
||||
@router.post("/lock")
|
||||
|
|
|
|||
730
code/web/routes/compare.py
Normal file
730
code/web/routes/compare.py
Normal file
|
|
@ -0,0 +1,730 @@
|
|||
"""
|
||||
Comparison Routes - Side-by-side deck comparison for batch builds.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
from fastapi import APIRouter, Request
|
||||
from fastapi.responses import HTMLResponse
|
||||
from typing import Any, Dict, List
|
||||
from ..app import templates
|
||||
from ..services.build_cache import BuildCache
|
||||
from ..services.tasks import get_session, new_sid
|
||||
from ..services.synergy_builder import analyze_and_build_synergy_deck
|
||||
from code.logging_util import get_logger
|
||||
import time
|
||||
|
||||
logger = get_logger(__name__)
|
||||
router = APIRouter()
|
||||
|
||||
|
||||
def _is_guaranteed_card(card_name: str) -> bool:
    """
    Check if a card is guaranteed/staple (should be filtered from interesting variance).

    Filters:
    - Basic lands (Plains, Island, Swamp, Mountain, Forest, Wastes, Snow-Covered variants)
    - Staple lands (Command Tower, Reliquary Tower, etc.)
    - Kindred lands
    - Generic fetch lands

    Args:
        card_name: Card name to check

    Returns:
        True if card should be filtered from "Most Common Cards"
    """
    try:
        from code.deck_builder import builder_constants as bc

        basics = set(getattr(bc, 'BASIC_LANDS', []))

        # Basic lands, including their Snow-Covered variants.
        if card_name in basics:
            return True
        if card_name.startswith('Snow-Covered ') and card_name.replace('Snow-Covered ', '') in basics:
            return True

        # Staple lands are the keys of STAPLE_LAND_CONDITIONS.
        if card_name in getattr(bc, 'STAPLE_LAND_CONDITIONS', {}):
            return True

        # Kindred lands.
        if card_name in set(getattr(bc, 'KINDRED_LAND_NAMES', [])):
            return True

        # Generic fetch lands.
        if card_name in set(getattr(bc, 'GENERIC_FETCH_LANDS', [])):
            return True

        # Color-specific fetch lands.
        return any(
            card_name in fetches
            for fetches in getattr(bc, 'COLOR_TO_FETCH_LANDS', {}).values()
        )
    except Exception as e:
        logger.debug(f"Error checking guaranteed card status for {card_name}: {e}")
        return False
|
||||
|
||||
|
||||
@router.get("/compare/{batch_id}", response_class=HTMLResponse)
|
||||
async def compare_batch(request: Request, batch_id: str) -> HTMLResponse:
|
||||
"""Main comparison view for batch builds."""
|
||||
sid = request.cookies.get("sid") or new_sid()
|
||||
sess = get_session(sid)
|
||||
|
||||
# Get batch data
|
||||
batch_status = BuildCache.get_batch_status(sess, batch_id)
|
||||
if not batch_status:
|
||||
return templates.TemplateResponse("error.html", {
|
||||
"request": request,
|
||||
"error": f"Batch {batch_id} not found. It may have expired.",
|
||||
"back_link": "/build"
|
||||
})
|
||||
|
||||
builds = BuildCache.get_batch_builds(sess, batch_id)
|
||||
config = BuildCache.get_batch_config(sess, batch_id)
|
||||
|
||||
if not builds:
|
||||
return templates.TemplateResponse("error.html", {
|
||||
"request": request,
|
||||
"error": "No completed builds found in this batch.",
|
||||
"back_link": "/build"
|
||||
})
|
||||
|
||||
# Calculate card overlap statistics
|
||||
overlap_stats = _calculate_overlap(builds)
|
||||
|
||||
# Prepare deck summaries
|
||||
summaries = []
|
||||
for build in builds:
|
||||
summary = _build_summary(build["result"], build["index"])
|
||||
summaries.append(summary)
|
||||
|
||||
ctx = {
|
||||
"request": request,
|
||||
"batch_id": batch_id,
|
||||
"batch_status": batch_status,
|
||||
"config": config,
|
||||
"builds": summaries,
|
||||
"overlap_stats": overlap_stats,
|
||||
"build_count": len(summaries),
|
||||
"synergy_exported": BuildCache.is_synergy_exported(sess, batch_id)
|
||||
}
|
||||
|
||||
resp = templates.TemplateResponse("compare/index.html", ctx)
|
||||
resp.set_cookie("sid", sid, httponly=True, samesite="lax")
|
||||
return resp
|
||||
|
||||
|
||||
def _calculate_overlap(builds: List[Dict[str, Any]]) -> Dict[str, Any]:
    """
    Calculate card overlap statistics across builds.

    Args:
        builds: List of build result dicts

    Returns:
        Dict with overlap statistics
    """
    from collections import Counter

    total_builds = len(builds)

    # Must-include cards, read from the first build (they should be in all builds).
    include_cards_set: set = set()
    if builds:
        first_summary = builds[0].get("result", {}).get("summary", {})
        if isinstance(first_summary, dict):
            inc_exc = first_summary.get("include_exclude_summary", {})
            if isinstance(inc_exc, dict):
                inc = inc_exc.get("include_cards", [])
                if isinstance(inc, list):
                    include_cards_set = set(inc)

    # For every card name, count how many distinct builds contain it.
    card_counts: Counter = Counter()
    for build in builds:
        summary = build.get("result", {}).get("summary", {})
        if not isinstance(summary, dict):
            continue
        type_breakdown = summary.get("type_breakdown", {})
        if not isinstance(type_breakdown, dict):
            continue

        # Deduplicate within a single build before counting.
        names_in_build = set()
        type_cards = type_breakdown.get("cards", {})
        if isinstance(type_cards, dict):
            for card_list in type_cards.values():
                if not isinstance(card_list, list):
                    continue
                for card in card_list:
                    if isinstance(card, dict) and card.get("name"):
                        names_in_build.add(card["name"])

        card_counts.update(names_in_build)

    # Bucket cards by how widely shared they are across the batch.
    counts = card_counts.values()
    stats = {
        "total_unique_cards": len(card_counts),
        "cards_in_all": sum(1 for c in counts if c == total_builds),
        "cards_in_most": sum(1 for c in counts if c >= total_builds * 0.8),
        "cards_in_some": sum(1 for c in counts if total_builds * 0.2 < c < total_builds * 0.8),
        "cards_in_few": sum(1 for c in counts if c <= total_builds * 0.2),
    }

    # Drop guaranteed/staple cards and explicit includes before taking the top 20,
    # so "Most Common Cards" highlights interesting variance rather than guaranteed hits.
    interesting = Counter({
        name: c for name, c in card_counts.items()
        if name not in include_cards_set and not _is_guaranteed_card(name)
    })

    stats["most_common"] = interesting.most_common(20)
    stats["total_builds"] = total_builds
    return stats
|
||||
|
||||
|
||||
def _build_summary(result: Dict[str, Any], index: int) -> Dict[str, Any]:
|
||||
"""
|
||||
Create a summary of a single build for comparison display.
|
||||
|
||||
Args:
|
||||
result: Build result from orchestrator
|
||||
index: Build index
|
||||
|
||||
Returns:
|
||||
Summary dict
|
||||
"""
|
||||
# Get summary from result
|
||||
summary = result.get("summary", {})
|
||||
if not isinstance(summary, dict):
|
||||
summary = {}
|
||||
|
||||
# Get type breakdown which contains card counts
|
||||
type_breakdown = summary.get("type_breakdown", {})
|
||||
if not isinstance(type_breakdown, dict):
|
||||
type_breakdown = {}
|
||||
|
||||
# Get counts directly from type breakdown
|
||||
counts = type_breakdown.get("counts", {})
|
||||
|
||||
# Use standardized keys from type breakdown
|
||||
creatures = counts.get("Creature", 0)
|
||||
lands = counts.get("Land", 0)
|
||||
artifacts = counts.get("Artifact", 0)
|
||||
enchantments = counts.get("Enchantment", 0)
|
||||
instants = counts.get("Instant", 0)
|
||||
sorceries = counts.get("Sorcery", 0)
|
||||
planeswalkers = counts.get("Planeswalker", 0)
|
||||
|
||||
# Get total from type breakdown
|
||||
total_cards = type_breakdown.get("total", 0)
|
||||
|
||||
# Get all cards from type breakdown cards dict
|
||||
all_cards = []
|
||||
type_cards = type_breakdown.get("cards", {})
|
||||
if isinstance(type_cards, dict):
|
||||
for card_list in type_cards.values():
|
||||
if isinstance(card_list, list):
|
||||
all_cards.extend(card_list)
|
||||
|
||||
return {
|
||||
"index": index,
|
||||
"build_number": index + 1,
|
||||
"total_cards": total_cards,
|
||||
"creatures": creatures,
|
||||
"lands": lands,
|
||||
"artifacts": artifacts,
|
||||
"enchantments": enchantments,
|
||||
"instants": instants,
|
||||
"sorceries": sorceries,
|
||||
"planeswalkers": planeswalkers,
|
||||
"cards": all_cards,
|
||||
"result": result
|
||||
}
|
||||
|
||||
|
||||
@router.post("/compare/{batch_id}/export")
|
||||
async def export_batch(request: Request, batch_id: str):
|
||||
"""
|
||||
Export all decks in a batch as a ZIP archive.
|
||||
|
||||
Args:
|
||||
request: FastAPI request object
|
||||
batch_id: Batch identifier
|
||||
|
||||
Returns:
|
||||
ZIP file with all deck CSV/TXT files + summary JSON
|
||||
"""
|
||||
import zipfile
|
||||
import io
|
||||
import json
|
||||
from pathlib import Path
|
||||
from fastapi.responses import StreamingResponse
|
||||
from datetime import datetime
|
||||
|
||||
sid = request.cookies.get("sid") or new_sid()
|
||||
sess = get_session(sid)
|
||||
|
||||
# Get batch data
|
||||
batch_status = BuildCache.get_batch_status(sess, batch_id)
|
||||
if not batch_status:
|
||||
return {"error": f"Batch {batch_id} not found"}
|
||||
|
||||
builds = BuildCache.get_batch_builds(sess, batch_id)
|
||||
config = BuildCache.get_batch_config(sess, batch_id)
|
||||
|
||||
if not builds:
|
||||
return {"error": "No completed builds found in this batch"}
|
||||
|
||||
# Create ZIP in memory
|
||||
zip_buffer = io.BytesIO()
|
||||
|
||||
with zipfile.ZipFile(zip_buffer, 'w', zipfile.ZIP_DEFLATED) as zip_file:
|
||||
# Collect all deck files
|
||||
commander_name = config.get("commander", "Unknown").replace("/", "-")
|
||||
timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
|
||||
|
||||
for i, build in enumerate(builds):
|
||||
result = build.get("result", {})
|
||||
csv_path = result.get("csv_path")
|
||||
txt_path = result.get("txt_path")
|
||||
|
||||
# Add CSV file
|
||||
if csv_path and Path(csv_path).exists():
|
||||
filename = f"Build_{i+1}_{commander_name}.csv"
|
||||
with open(csv_path, 'rb') as f:
|
||||
zip_file.writestr(filename, f.read())
|
||||
|
||||
# Add TXT file
|
||||
if txt_path and Path(txt_path).exists():
|
||||
filename = f"Build_{i+1}_{commander_name}.txt"
|
||||
with open(txt_path, 'rb') as f:
|
||||
zip_file.writestr(filename, f.read())
|
||||
|
||||
# Add batch summary JSON
|
||||
summary_data = {
|
||||
"batch_id": batch_id,
|
||||
"commander": config.get("commander"),
|
||||
"themes": config.get("tags", []),
|
||||
"bracket": config.get("bracket"),
|
||||
"build_count": len(builds),
|
||||
"exported_at": timestamp,
|
||||
"builds": [
|
||||
{
|
||||
"build_number": i + 1,
|
||||
"csv_file": f"Build_{i+1}_{commander_name}.csv",
|
||||
"txt_file": f"Build_{i+1}_{commander_name}.txt"
|
||||
}
|
||||
for i in range(len(builds))
|
||||
]
|
||||
}
|
||||
zip_file.writestr("batch_summary.json", json.dumps(summary_data, indent=2))
|
||||
|
||||
# Prepare response
|
||||
zip_buffer.seek(0)
|
||||
zip_filename = f"{commander_name}_Batch_{timestamp}.zip"
|
||||
|
||||
return StreamingResponse(
|
||||
iter([zip_buffer.getvalue()]),
|
||||
media_type="application/zip",
|
||||
headers={
|
||||
"Content-Disposition": f'attachment; filename="{zip_filename}"'
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
@router.post("/compare/{batch_id}/rebuild")
|
||||
async def rebuild_batch(request: Request, batch_id: str):
|
||||
"""
|
||||
Rebuild the same configuration with the same build count.
|
||||
Creates a new batch with identical settings and redirects to batch progress.
|
||||
|
||||
Args:
|
||||
request: FastAPI request object
|
||||
batch_id: Original batch identifier
|
||||
|
||||
Returns:
|
||||
Redirect to new batch progress page
|
||||
"""
|
||||
from fastapi.responses import RedirectResponse
|
||||
from ..services.multi_build_orchestrator import MultiBuildOrchestrator
|
||||
|
||||
sid = request.cookies.get("sid") or new_sid()
|
||||
sess = get_session(sid)
|
||||
|
||||
# Get original config and build count
|
||||
config = BuildCache.get_batch_config(sess, batch_id)
|
||||
batch_status = BuildCache.get_batch_status(sess, batch_id)
|
||||
|
||||
if not config or not batch_status:
|
||||
return RedirectResponse(url="/build", status_code=302)
|
||||
|
||||
# Get build count from original batch
|
||||
build_count = batch_status.get("total_builds", 1)
|
||||
|
||||
# Create new batch with same config
|
||||
orchestrator = MultiBuildOrchestrator()
|
||||
new_batch_id = orchestrator.queue_builds(config, build_count, sid)
|
||||
|
||||
# Start builds in background
|
||||
import asyncio
|
||||
asyncio.create_task(orchestrator.run_batch_parallel(new_batch_id))
|
||||
|
||||
# Redirect to new batch progress
|
||||
response = RedirectResponse(url=f"/build/batch/{new_batch_id}/progress", status_code=302)
|
||||
response.set_cookie("sid", sid, httponly=True, samesite="lax")
|
||||
return response
|
||||
|
||||
|
||||
@router.post("/compare/{batch_id}/build-synergy")
|
||||
async def build_synergy_deck(request: Request, batch_id: str) -> HTMLResponse:
|
||||
"""
|
||||
Build a synergy deck from batch builds.
|
||||
|
||||
Analyzes all builds in the batch and creates an optimized "best-of" deck
|
||||
by scoring cards based on frequency, EDHREC rank, and theme alignment.
|
||||
"""
|
||||
sid = request.cookies.get("sid") or new_sid()
|
||||
sess = get_session(sid)
|
||||
|
||||
# Get batch data
|
||||
builds = BuildCache.get_batch_builds(sess, batch_id)
|
||||
config = BuildCache.get_batch_config(sess, batch_id)
|
||||
batch_status = BuildCache.get_batch_status(sess, batch_id)
|
||||
|
||||
if not builds or not config or not batch_status:
|
||||
return HTMLResponse(
|
||||
content=f'<div class="error-message">Batch {batch_id} not found or has no builds</div>',
|
||||
status_code=404
|
||||
)
|
||||
|
||||
start_time = time.time()
|
||||
|
||||
try:
|
||||
# Analyze and build synergy deck
|
||||
synergy_deck = analyze_and_build_synergy_deck(builds, config)
|
||||
|
||||
elapsed_ms = int((time.time() - start_time) * 1000)
|
||||
|
||||
logger.info(
|
||||
f"[Synergy] Built deck for batch {batch_id}: "
|
||||
f"{synergy_deck['total_cards']} cards, "
|
||||
f"avg_score={synergy_deck['avg_score']}, "
|
||||
f"elapsed={elapsed_ms}ms"
|
||||
)
|
||||
|
||||
# Prepare cards_by_category for template
|
||||
cards_by_category = {
|
||||
category: [
|
||||
{
|
||||
"name": card.name,
|
||||
"frequency": card.frequency,
|
||||
"synergy_score": card.synergy_score,
|
||||
"appearance_count": card.appearance_count,
|
||||
"role": card.role,
|
||||
"tags": card.tags,
|
||||
"type_line": card.type_line,
|
||||
"count": card.count
|
||||
}
|
||||
for card in cards
|
||||
]
|
||||
for category, cards in synergy_deck["by_category"].items()
|
||||
}
|
||||
|
||||
# Render preview template
|
||||
return templates.TemplateResponse("compare/_synergy_preview.html", {
|
||||
"request": request,
|
||||
"batch_id": batch_id,
|
||||
"synergy_deck": {
|
||||
"total_cards": synergy_deck["total_cards"],
|
||||
"avg_frequency": synergy_deck["avg_frequency"],
|
||||
"avg_score": synergy_deck["avg_score"],
|
||||
"high_frequency_count": synergy_deck["high_frequency_count"],
|
||||
"cards_by_category": cards_by_category
|
||||
},
|
||||
"total_builds": len(builds),
|
||||
"build_time_ms": elapsed_ms
|
||||
})
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"[Synergy] Error building synergy deck: {e}", exc_info=True)
|
||||
return HTMLResponse(
|
||||
content=f'<div class="error-message">Failed to build synergy deck: {str(e)}</div>',
|
||||
status_code=500
|
||||
)
|
||||
|
||||
|
||||
@router.post("/compare/{batch_id}/export-synergy")
|
||||
async def export_synergy_deck(request: Request, batch_id: str):
|
||||
"""
|
||||
Export the synergy deck as CSV and TXT files in a ZIP archive.
|
||||
|
||||
Args:
|
||||
request: FastAPI request object
|
||||
batch_id: Batch identifier
|
||||
|
||||
Returns:
|
||||
ZIP file with synergy deck CSV/TXT files
|
||||
"""
|
||||
import io
|
||||
import csv
|
||||
import zipfile
|
||||
import json
|
||||
from fastapi.responses import StreamingResponse
|
||||
from datetime import datetime
|
||||
|
||||
sid = request.cookies.get("sid") or new_sid()
|
||||
sess = get_session(sid)
|
||||
|
||||
# Get batch data
|
||||
batch_status = BuildCache.get_batch_status(sess, batch_id)
|
||||
if not batch_status:
|
||||
return {"error": f"Batch {batch_id} not found"}
|
||||
|
||||
builds = BuildCache.get_batch_builds(sess, batch_id)
|
||||
config = BuildCache.get_batch_config(sess, batch_id)
|
||||
|
||||
if not builds:
|
||||
return {"error": "No completed builds found in this batch"}
|
||||
|
||||
# Build synergy deck (reuse the existing logic)
|
||||
from code.web.services.synergy_builder import analyze_and_build_synergy_deck
|
||||
|
||||
try:
|
||||
synergy_deck = analyze_and_build_synergy_deck(
|
||||
builds=builds,
|
||||
config=config
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(f"[Export Synergy] Error building synergy deck: {e}", exc_info=True)
|
||||
return {"error": f"Failed to build synergy deck: {str(e)}"}
|
||||
|
||||
# Prepare file names
|
||||
commander_name = config.get("commander", "Unknown").replace("/", "-").replace(" ", "")
|
||||
timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
|
||||
base_filename = f"{commander_name}_Synergy_{timestamp}"
|
||||
|
||||
# Prepare deck_files directory
|
||||
from pathlib import Path
|
||||
deck_files_dir = Path("deck_files")
|
||||
deck_files_dir.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
# Create CSV content
|
||||
csv_buffer = io.StringIO()
|
||||
csv_writer = csv.writer(csv_buffer)
|
||||
|
||||
# CSV Header
|
||||
csv_writer.writerow([
|
||||
"Name", "Count", "Category", "Role", "Frequency", "Synergy Score",
|
||||
"Appearance Count", "Tags", "Type"
|
||||
])
|
||||
|
||||
# CSV Rows - sort by category
|
||||
category_order = ["Land", "Creature", "Artifact", "Enchantment", "Instant", "Sorcery", "Planeswalker", "Battle"]
|
||||
by_category = synergy_deck.get("by_category", {})
|
||||
|
||||
for category in category_order:
|
||||
cards = by_category.get(category, [])
|
||||
for card in cards:
|
||||
csv_writer.writerow([
|
||||
card.name,
|
||||
card.count,
|
||||
card.category,
|
||||
card.role,
|
||||
f"{card.frequency:.2%}",
|
||||
f"{card.synergy_score:.2f}",
|
||||
card.appearance_count,
|
||||
"|".join(card.tags) if card.tags else "",
|
||||
card.type_line
|
||||
])
|
||||
|
||||
csv_content = csv_buffer.getvalue()
|
||||
|
||||
# Create TXT content (Moxfield/EDHREC format)
|
||||
txt_buffer = io.StringIO()
|
||||
|
||||
# TXT Header
|
||||
txt_buffer.write(f"# Synergy Deck - {commander_name}\n")
|
||||
txt_buffer.write(f"# Commander: {config.get('commander', 'Unknown')}\n")
|
||||
txt_buffer.write(f"# Colors: {', '.join(config.get('colors', []))}\n")
|
||||
txt_buffer.write(f"# Themes: {', '.join(config.get('tags', []))}\n")
|
||||
txt_buffer.write(f"# Generated from {len(builds)} builds\n")
|
||||
txt_buffer.write(f"# Total Cards: {synergy_deck['total_cards']}\n")
|
||||
txt_buffer.write(f"# Avg Frequency: {synergy_deck['avg_frequency']:.1%}\n")
|
||||
txt_buffer.write(f"# Avg Synergy Score: {synergy_deck['avg_score']:.2f}\n")
|
||||
txt_buffer.write("\n")
|
||||
|
||||
# TXT Card list
|
||||
for category in category_order:
|
||||
cards = by_category.get(category, [])
|
||||
if not cards:
|
||||
continue
|
||||
|
||||
for card in cards:
|
||||
line = f"{card.count} {card.name}"
|
||||
if card.count > 1:
|
||||
# Show count prominently for multi-copy cards
|
||||
txt_buffer.write(f"{line}\n")
|
||||
else:
|
||||
txt_buffer.write(f"1 {card.name}\n")
|
||||
|
||||
txt_content = txt_buffer.getvalue()
|
||||
|
||||
# Save CSV and TXT to deck_files directory
|
||||
csv_path = deck_files_dir / f"{base_filename}.csv"
|
||||
txt_path = deck_files_dir / f"{base_filename}.txt"
|
||||
summary_path = deck_files_dir / f"{base_filename}.summary.json"
|
||||
compliance_path = deck_files_dir / f"{base_filename}_compliance.json"
|
||||
|
||||
try:
|
||||
csv_path.write_text(csv_content, encoding='utf-8')
|
||||
txt_path.write_text(txt_content, encoding='utf-8')
|
||||
|
||||
# Create summary JSON (similar to individual builds)
|
||||
summary_data = {
|
||||
"commander": config.get("commander", "Unknown"),
|
||||
"tags": config.get("tags", []),
|
||||
"colors": config.get("colors", []),
|
||||
"bracket_level": config.get("bracket"),
|
||||
"csv": str(csv_path),
|
||||
"txt": str(txt_path),
|
||||
"synergy_stats": {
|
||||
"total_cards": synergy_deck["total_cards"],
|
||||
"unique_cards": synergy_deck.get("unique_cards", len(synergy_deck["cards"])),
|
||||
"avg_frequency": synergy_deck["avg_frequency"],
|
||||
"avg_score": synergy_deck["avg_score"],
|
||||
"high_frequency_count": synergy_deck["high_frequency_count"],
|
||||
"source_builds": len(builds)
|
||||
},
|
||||
"exported_at": timestamp
|
||||
}
|
||||
summary_path.write_text(json.dumps(summary_data, indent=2), encoding='utf-8')
|
||||
|
||||
# Create compliance JSON (basic compliance for synergy deck)
|
||||
compliance_data = {
|
||||
"overall": "N/A",
|
||||
"message": "Synergy deck - compliance checking not applicable",
|
||||
"deck_size": synergy_deck["total_cards"],
|
||||
"commander": config.get("commander", "Unknown"),
|
||||
"source": "synergy_builder",
|
||||
"build_count": len(builds)
|
||||
}
|
||||
compliance_path.write_text(json.dumps(compliance_data, indent=2), encoding='utf-8')
|
||||
|
||||
logger.info(f"[Export Synergy] Saved synergy deck to {csv_path} and {txt_path}")
|
||||
except Exception as e:
|
||||
logger.error(f"[Export Synergy] Failed to save files to disk: {e}", exc_info=True)
|
||||
|
||||
# Delete batch build files to avoid clutter
|
||||
deleted_files = []
|
||||
for build in builds:
|
||||
result = build.get("result", {})
|
||||
csv_file = result.get("csv_path")
|
||||
txt_file = result.get("txt_path")
|
||||
summary_file = result.get("summary_path")
|
||||
|
||||
# Delete CSV file
|
||||
if csv_file:
|
||||
csv_p = Path(csv_file)
|
||||
if csv_p.exists():
|
||||
try:
|
||||
csv_p.unlink()
|
||||
deleted_files.append(csv_p.name)
|
||||
except Exception as e:
|
||||
logger.warning(f"[Export Synergy] Failed to delete {csv_file}: {e}")
|
||||
|
||||
# Delete TXT file
|
||||
if txt_file:
|
||||
txt_p = Path(txt_file)
|
||||
if txt_p.exists():
|
||||
try:
|
||||
txt_p.unlink()
|
||||
deleted_files.append(txt_p.name)
|
||||
except Exception as e:
|
||||
logger.warning(f"[Export Synergy] Failed to delete {txt_file}: {e}")
|
||||
|
||||
# Delete summary JSON file
|
||||
if summary_file:
|
||||
summary_p = Path(summary_file)
|
||||
if summary_p.exists():
|
||||
try:
|
||||
summary_p.unlink()
|
||||
deleted_files.append(summary_p.name)
|
||||
except Exception as e:
|
||||
logger.warning(f"[Export Synergy] Failed to delete {summary_file}: {e}")
|
||||
|
||||
if deleted_files:
|
||||
logger.info(f"[Export Synergy] Cleaned up {len(deleted_files)} batch build files")
|
||||
|
||||
# Mark batch as having synergy exported (to disable batch export button)
|
||||
BuildCache.mark_synergy_exported(sess, batch_id)
|
||||
|
||||
# Create ZIP in memory for download
|
||||
zip_buffer = io.BytesIO()
|
||||
|
||||
with zipfile.ZipFile(zip_buffer, 'w', zipfile.ZIP_DEFLATED) as zip_file:
|
||||
# Add CSV to ZIP
|
||||
zip_file.writestr(f"{base_filename}.csv", csv_content)
|
||||
|
||||
# Add TXT to ZIP
|
||||
zip_file.writestr(f"{base_filename}.txt", txt_content)
|
||||
|
||||
# Add summary JSON to ZIP
|
||||
summary_json = json.dumps(summary_data, indent=2)
|
||||
zip_file.writestr(f"{base_filename}.summary.json", summary_json)
|
||||
|
||||
# Add compliance JSON to ZIP
|
||||
compliance_json = json.dumps(compliance_data, indent=2)
|
||||
zip_file.writestr(f"{base_filename}_compliance.json", compliance_json)
|
||||
|
||||
# Add metadata JSON (export-specific info)
|
||||
metadata = {
|
||||
"batch_id": batch_id,
|
||||
"commander": config.get("commander"),
|
||||
"themes": config.get("tags", []),
|
||||
"colors": config.get("colors", []),
|
||||
"bracket": config.get("bracket"),
|
||||
"build_count": len(builds),
|
||||
"exported_at": timestamp,
|
||||
"synergy_stats": {
|
||||
"total_cards": synergy_deck["total_cards"],
|
||||
"avg_frequency": synergy_deck["avg_frequency"],
|
||||
"avg_score": synergy_deck["avg_score"],
|
||||
"high_frequency_count": synergy_deck["high_frequency_count"]
|
||||
},
|
||||
"cleaned_up_files": len(deleted_files)
|
||||
}
|
||||
zip_file.writestr("synergy_metadata.json", json.dumps(metadata, indent=2))
|
||||
|
||||
# Prepare response
|
||||
zip_buffer.seek(0)
|
||||
zip_filename = f"{base_filename}.zip"
|
||||
|
||||
return StreamingResponse(
|
||||
iter([zip_buffer.getvalue()]),
|
||||
media_type="application/zip",
|
||||
headers={
|
||||
"Content-Disposition": f'attachment; filename="{zip_filename}"'
|
||||
}
|
||||
)
|
||||
Loading…
Add table
Add a link
Reference in a new issue