Mirror of https://github.com/mwisnowski/mtg_python_deckbuilder.git
Synced 2025-12-17 08:00:13 +01:00
feat(themes): whitelist governance, synergy cap, docs + tests; feat(random): laid groundwork for random implementation, confirmed via headless testing
parent 03e839fb87
commit 16261bbf09
34 changed files with 12594 additions and 23 deletions
code/tests/test_builder_rng_seeded_stream.py (new file, 15 lines)
@@ -0,0 +1,15 @@

from __future__ import annotations

from deck_builder.builder import DeckBuilder


def test_builder_rng_same_seed_identical_streams():
    b1 = DeckBuilder()
    b1.set_seed('alpha')
    seq1 = [b1.rng.random() for _ in range(5)]

    b2 = DeckBuilder()
    b2.set_seed('alpha')
    seq2 = [b2.rng.random() for _ in range(5)]

    assert seq1 == seq2
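The test above assumes DeckBuilder exposes a set_seed method and an rng attribute, neither of which appears in this diff. A minimal sketch of how such a method could delegate to the shared random_util helpers (the helper name is taken from test_random_util.py below; the builder internals here are assumptions) might look like:

# Hypothetical sketch only: DeckBuilder's real set_seed is not shown in this commit.
from __future__ import annotations

import random
from random_util import derive_seed_from_string  # helper assumed from test_random_util.py


class DeckBuilder:
    def __init__(self) -> None:
        # Unseeded by default; callers opt in to determinism via set_seed().
        self.seed: int | None = None
        self.rng: random.Random = random.Random()

    def set_seed(self, seed: int | str) -> None:
        # Normalize string/int seeds to a stable value, then rebuild the stream.
        self.seed = derive_seed_from_string(seed)
        self.rng = random.Random(self.seed)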
code/tests/test_deterministic_sampling.py (new file, 33 lines)
@@ -0,0 +1,33 @@

from deck_builder import builder_utils as bu
from random_util import set_seed


def test_weighted_sample_deterministic_same_seed():
    pool = [("a", 1), ("b", 2), ("c", 3), ("d", 4)]
    k = 3
    rng1 = set_seed(12345)
    sel1 = bu.weighted_sample_without_replacement(pool, k, rng=rng1)
    # Reset to the same seed and expect the same selection order
    rng2 = set_seed(12345)
    sel2 = bu.weighted_sample_without_replacement(pool, k, rng=rng2)
    assert sel1 == sel2


def test_compute_adjusted_target_deterministic_same_seed():
    # Use a simple output func that collects messages (but we don't assert on them here)
    msgs: list[str] = []
    out = msgs.append
    original_cfg = 10
    existing = 4

    rng1 = set_seed(999)
    to_add1, bonus1 = bu.compute_adjusted_target(
        "Ramp", original_cfg, existing, out, plural_word="ramp spells", rng=rng1
    )

    rng2 = set_seed(999)
    to_add2, bonus2 = bu.compute_adjusted_target(
        "Ramp", original_cfg, existing, out, plural_word="ramp spells", rng=rng2
    )

    assert (to_add1, bonus1) == (to_add2, bonus2)
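weighted_sample_without_replacement is exercised here but not defined in this diff. For reference, one standard way to implement weighted sampling without replacement over (item, weight) pairs with an injected random.Random, so the same seed yields the same selection order, is the Efraimidis-Spirakis key method; the repo's helper may use a different algorithm:

# Illustrative only; the actual builder_utils implementation may differ.
import random
from typing import Sequence, TypeVar

T = TypeVar("T")


def weighted_sample_without_replacement(
    pool: Sequence[tuple[T, float]], k: int, rng: random.Random
) -> list[T]:
    # Efraimidis-Spirakis: draw key u**(1/w) per item, keep the k largest keys.
    keyed = [(rng.random() ** (1.0 / w), item) for item, w in pool if w > 0]
    keyed.sort(key=lambda kv: kv[0], reverse=True)
    return [item for _, item in keyed[:k]]


if __name__ == "__main__":
    r = random.Random(12345)
    # Same seed, same rng, same selection order every run.
    print(weighted_sample_without_replacement([("a", 1), ("b", 2), ("c", 3), ("d", 4)], 3, r))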
code/tests/test_random_build_api.py (new file, 22 lines)
@@ -0,0 +1,22 @@

from __future__ import annotations

import importlib
import os
from starlette.testclient import TestClient


def test_random_build_api_commander_and_seed(monkeypatch):
    # Enable Random Modes and use tiny dataset
    monkeypatch.setenv("RANDOM_MODES", "1")
    monkeypatch.setenv("CSV_FILES_DIR", os.path.join("csv_files", "testdata"))

    app_module = importlib.import_module('code.web.app')
    client = TestClient(app_module.app)

    payload = {"seed": 12345, "theme": "Goblin Kindred"}
    r = client.post('/api/random_build', json=payload)
    assert r.status_code == 200
    data = r.json()
    assert data["seed"] == 12345
    assert isinstance(data.get("commander"), str)
    assert data.get("commander")
code/tests/test_random_determinism.py (new file, 21 lines)
@@ -0,0 +1,21 @@

from __future__ import annotations

import os
from deck_builder.random_entrypoint import build_random_deck


def test_random_build_is_deterministic_with_seed(monkeypatch):
    # Force deterministic tiny dataset
    monkeypatch.setenv("CSV_FILES_DIR", os.path.join("csv_files", "testdata"))
    # Fixed seed should produce same commander consistently
    out1 = build_random_deck(seed=12345)
    out2 = build_random_deck(seed=12345)
    assert out1.commander == out2.commander
    assert out1.seed == out2.seed


def test_random_build_uses_theme_when_available(monkeypatch):
    monkeypatch.setenv("CSV_FILES_DIR", os.path.join("csv_files", "testdata"))
    # On tiny dataset, provide a theme that exists or not; either path should not crash
    res = build_random_deck(theme="Goblin Kindred", seed=42)
    assert isinstance(res.commander, str) and len(res.commander) > 0
code/tests/test_random_full_build_api.py (new file, 25 lines)
@@ -0,0 +1,25 @@

from __future__ import annotations

import importlib
import os
from starlette.testclient import TestClient


def test_random_full_build_api_returns_deck_and_permalink(monkeypatch):
    # Enable Random Modes and use tiny dataset
    monkeypatch.setenv("RANDOM_MODES", "1")
    monkeypatch.setenv("CSV_FILES_DIR", os.path.join("csv_files", "testdata"))

    app_module = importlib.import_module('code.web.app')
    client = TestClient(app_module.app)

    payload = {"seed": 4242, "theme": "Goblin Kindred"}
    r = client.post('/api/random_full_build', json=payload)
    assert r.status_code == 200
    data = r.json()
    assert data["seed"] == 4242
    assert isinstance(data.get("commander"), str) and data["commander"]
    assert isinstance(data.get("decklist"), list)
    # Permalink present and shaped like /build/from?state=...
    assert data.get("permalink")
    assert "/build/from?state=" in data["permalink"]
code/tests/test_random_full_build_determinism.py (new file, 17 lines)
@@ -0,0 +1,17 @@

from __future__ import annotations

import os
from deck_builder.random_entrypoint import build_random_full_deck


def test_random_full_build_is_deterministic_on_frozen_dataset(monkeypatch):
    # Use frozen dataset for determinism
    monkeypatch.setenv("CSV_FILES_DIR", os.path.join("csv_files", "testdata"))
    # Fixed seed should produce the same compact decklist
    out1 = build_random_full_deck(theme="Goblin Kindred", seed=777)
    out2 = build_random_full_deck(theme="Goblin Kindred", seed=777)

    assert out1.seed == out2.seed == 777
    assert out1.commander == out2.commander
    assert isinstance(out1.decklist, list) and isinstance(out2.decklist, list)
    assert out1.decklist == out2.decklist
code/tests/test_random_reroll_endpoints.py (new file, 45 lines)
@@ -0,0 +1,45 @@

import os
import json

import pytest

from fastapi.testclient import TestClient


@pytest.fixture(scope="module")
def client():
    # Ensure flags and frozen dataset
    os.environ["RANDOM_MODES"] = "1"
    os.environ["RANDOM_UI"] = "1"
    os.environ["CSV_FILES_DIR"] = os.path.join("csv_files", "testdata")

    from web.app import app

    with TestClient(app) as c:
        yield c


def test_api_random_reroll_increments_seed(client: TestClient):
    r1 = client.post("/api/random_full_build", json={"seed": 123})
    assert r1.status_code == 200, r1.text
    data1 = r1.json()
    assert data1.get("seed") == 123

    r2 = client.post("/api/random_reroll", json={"seed": 123})
    assert r2.status_code == 200, r2.text
    data2 = r2.json()
    assert data2.get("seed") == 124
    assert data2.get("permalink")


def test_hx_random_reroll_returns_html(client: TestClient):
    headers = {"HX-Request": "true", "Content-Type": "application/json"}
    r = client.post("/hx/random_reroll", data=json.dumps({"seed": 42}), headers=headers)
    assert r.status_code == 200, r.text
    # Accept either HTML fragment or JSON fallback
    content_type = r.headers.get("content-type", "")
    if "text/html" in content_type:
        assert "Seed:" in r.text
    else:
        j = r.json()
        assert j.get("seed") in (42, 43)  # depends on increment policy
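The reroll assertions encode the intended policy: rerolling with seed N deterministically yields seed N + 1. The endpoint itself is not part of this diff; a minimal FastAPI-style sketch of that increment policy (handler, model, and permalink payload here are assumptions, not the real route in web/app.py) could look like:

# Sketch of the assumed reroll policy (seed -> seed + 1); not the shipped handler.
from fastapi import FastAPI
from pydantic import BaseModel

app = FastAPI()


class RerollRequest(BaseModel):
    seed: int


@app.post("/api/random_reroll")
def random_reroll(req: RerollRequest) -> dict:
    new_seed = req.seed + 1  # deterministic "next" seed, matching the 123 -> 124 expectation above
    # A real handler would rebuild the deck with new_seed and return commander/decklist too;
    # the state token below is a placeholder, the tests only check the "/build/from?state=" prefix.
    return {"seed": new_seed, "permalink": f"/build/from?state=seed-{new_seed}"}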
code/tests/test_random_util.py (new file, 37 lines)
@@ -0,0 +1,37 @@

from __future__ import annotations

from random_util import derive_seed_from_string, set_seed, get_random, generate_seed


def test_derive_seed_from_string_stable():
    # Known value derived from SHA-256('test-seed') first 8 bytes masked to 63 bits
    assert derive_seed_from_string('test-seed') == 6214070892065607348
    # Int passthrough-like behavior (normalized to positive 63-bit)
    assert derive_seed_from_string(42) == 42
    assert derive_seed_from_string(-42) == 42


def test_set_seed_deterministic_stream():
    r1 = set_seed('alpha')
    r2 = set_seed('alpha')
    seq1 = [r1.random() for _ in range(5)]
    seq2 = [r2.random() for _ in range(5)]
    assert seq1 == seq2


def test_get_random_unseeded_independent():
    a = get_random()
    b = get_random()
    # Advance a few steps
    _ = [a.random() for _ in range(3)]
    _ = [b.random() for _ in range(3)]
    # They should not be the same object and streams should diverge vs seeded
    assert a is not b


def test_generate_seed_range():
    s = generate_seed()
    assert isinstance(s, int)
    assert s >= 0
    # Ensure it's within 63-bit range
    assert s < (1 << 63)
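These tests pin down the expected surface of random_util: string seeds hash to a stable non-negative 63-bit integer (the comment names SHA-256, first 8 bytes, masked to 63 bits), integer seeds normalize to their absolute value, set_seed returns a dedicated random.Random, and generate_seed stays within [0, 2^63). A sketch consistent with those assertions (byte order and exact masking are assumptions, and the pinned constant is not re-derived here) might be:

# Sketch inferred from the assertions above; the shipped random_util may differ internally.
from __future__ import annotations

import hashlib
import random

_MASK_63 = (1 << 63) - 1


def derive_seed_from_string(value: int | str) -> int:
    if isinstance(value, int):
        return abs(value) & _MASK_63  # -42 -> 42, per the passthrough test
    digest = hashlib.sha256(str(value).encode("utf-8")).digest()
    return int.from_bytes(digest[:8], "big") & _MASK_63  # first 8 bytes, masked to 63 bits


def set_seed(value: int | str) -> random.Random:
    # Same input -> same derived seed -> identical .random() streams.
    return random.Random(derive_seed_from_string(value))


def get_random() -> random.Random:
    # Fresh, unseeded instance per call; callers get independent objects.
    return random.Random()


def generate_seed() -> int:
    return random.SystemRandom().randrange(1 << 63)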
code/tests/test_seeded_builder_minimal.py (new file, 18 lines)
@@ -0,0 +1,18 @@

from __future__ import annotations

import os
from code.headless_runner import run


def test_headless_seed_threads_into_builder(monkeypatch):
    # Use the tiny test dataset for speed/determinism
    monkeypatch.setenv("CSV_FILES_DIR", os.path.join("csv_files", "testdata"))
    # Use a commander known to be in tiny dataset or fallback path; we rely on search/confirm flow
    # Provide a simple name that will fuzzy match one of the entries.
    out1 = run(command_name="Krenko", seed=999)
    out2 = run(command_name="Krenko", seed=999)
    # Determinism: the seed should be set on the builder and identical across runs
    assert getattr(out1, "seed", None) == getattr(out2, "seed", None) == 999
    # Basic sanity: commander selection should have occurred
    assert isinstance(getattr(out1, "commander_name", ""), str)
    assert isinstance(getattr(out2, "commander_name", ""), str)
code/tests/test_theme_whitelist_and_synergy_cap.py (new file, 84 lines)
@@ -0,0 +1,84 @@

import json
import subprocess
import sys
from pathlib import Path

# This test validates that the whitelist governance + synergy cap logic
# (implemented in extract_themes.py and theme_whitelist.yml) behaves as expected.
# It focuses on a handful of anchor themes to keep runtime fast and deterministic.

ROOT = Path(__file__).resolve().parents[2]
SCRIPT = ROOT / "code" / "scripts" / "extract_themes.py"
OUTPUT_JSON = ROOT / "config" / "themes" / "theme_list.json"


def run_extractor():
    # Re-run extraction so the test always evaluates fresh output.
    # Using the current python executable ensures we run inside the active venv.
    result = subprocess.run([sys.executable, str(SCRIPT)], capture_output=True, text=True)
    assert result.returncode == 0, f"extract_themes.py failed: {result.stderr or result.stdout}"
    assert OUTPUT_JSON.exists(), "Expected theme_list.json to be generated"


def load_themes():
    data = json.loads(OUTPUT_JSON.read_text(encoding="utf-8"))
    themes = data.get("themes", [])
    mapping = {t["theme"]: t for t in themes if isinstance(t, dict) and "theme" in t}
    return mapping


def assert_contains(theme_map, theme_name):
    assert theme_name in theme_map, f"Expected theme '{theme_name}' in generated theme list"


def test_synergy_cap_and_enforced_inclusions():
    run_extractor()
    theme_map = load_themes()

    # Target anchors to validate
    anchors = [
        "+1/+1 Counters",
        "-1/-1 Counters",
        "Counters Matter",
        "Reanimate",
        "Outlaw Kindred",
    ]
    for a in anchors:
        assert_contains(theme_map, a)

    # Synergy cap check (<=5)
    for a in anchors:
        syn = theme_map[a]["synergies"]
        assert len(syn) <= 5, f"Synergy cap violated for {a}: {syn} (len={len(syn)})"

    # Enforced synergies for counters cluster
    plus_syn = set(theme_map["+1/+1 Counters"]["synergies"])
    assert {"Proliferate", "Counters Matter"}.issubset(plus_syn), "+1/+1 Counters missing enforced synergies"

    minus_syn = set(theme_map["-1/-1 Counters"]["synergies"])
    assert {"Proliferate", "Counters Matter"}.issubset(minus_syn), "-1/-1 Counters missing enforced synergies"

    counters_matter_syn = set(theme_map["Counters Matter"]["synergies"])
    assert "Proliferate" in counters_matter_syn, "Counters Matter should include Proliferate"

    # Reanimate anchor (enforced synergy to Graveyard Matters retained while capped)
    reanimate_syn = theme_map["Reanimate"]["synergies"]
    assert "Graveyard Matters" in reanimate_syn, "Reanimate should include Graveyard Matters"
    assert "Enter the Battlefield" in reanimate_syn, "Reanimate should include Enter the Battlefield (curated)"

    # Outlaw Kindred - curated list should remain exactly its 5 intrinsic sub-tribes
    outlaw_expected = {"Warlock Kindred", "Pirate Kindred", "Rogue Kindred", "Assassin Kindred", "Mercenary Kindred"}
    outlaw_syn = set(theme_map["Outlaw Kindred"]["synergies"])
    assert outlaw_syn == outlaw_expected, f"Outlaw Kindred synergies mismatch. Expected {outlaw_expected}, got {outlaw_syn}"

    # No enforced synergy should be silently truncated if it was required (already ensured by ordering + length checks)
    # Additional safety: ensure every enforced synergy appears in its anchor (sampling a subset)
    for anchor, required in {
        "+1/+1 Counters": ["Proliferate", "Counters Matter"],
        "-1/-1 Counters": ["Proliferate", "Counters Matter"],
        "Reanimate": ["Graveyard Matters"],
    }.items():
        present = set(theme_map[anchor]["synergies"])
        missing = [r for r in required if r not in present]
        assert not missing, f"Anchor {anchor} missing enforced synergies: {missing}"
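The assertions above describe the governance contract: curated synergies enforced via theme_whitelist.yml must survive the cap, and no theme may list more than five synergies. The extractor itself is not in this diff; a minimal sketch of a cap step that honors enforced entries first (function and parameter names are assumptions) could be:

# Illustrative cap logic only; extract_themes.py may implement this differently.
SYNERGY_CAP = 5


def cap_synergies(derived: list[str], enforced: list[str], cap: int = SYNERGY_CAP) -> list[str]:
    """Keep enforced synergies first, then fill remaining slots from derived ones, preserving order."""
    if len(enforced) > cap:
        # Enforced entries may never be silently truncated.
        raise ValueError(f"Enforced synergies exceed cap ({len(enforced)} > {cap})")
    result: list[str] = []
    for name in enforced + derived:
        if name not in result:
            result.append(name)
    # Only the derived tail is trimmed; enforced entries always fit within the cap.
    return result[:cap]

Applied to the counters cluster, "Proliferate" and "Counters Matter" would always occupy two of the five slots for "+1/+1 Counters" and "-1/-1 Counters", which is what the issubset checks above verify.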