test: add integration tests for all modules
- Add pytest-asyncio dev dependency and configure asyncio_mode=auto
- Add filterwarnings to suppress third-party PydanticDeprecatedSince20
- Add conftest.py with shared fixtures (project_dir, write_config, etc.)
- Add test_config.py: YAML loading, target type inference, model resolution
- Add test_graph.py: DAG construction, cycle detection, build ordering
- Add test_state.py: hash functions, state persistence, dirty checking
- Add test_builder.py: full build pipeline with FakeProvider, incremental builds, selective builds, error isolation, dependency cascading
- Add test_providers.py: ImageProvider and TextProvider with mocked clients
- Add test_cli.py: build/clean/graph commands via typer CliRunner
- All 94 tests pass with 0 basedpyright warnings
This commit is contained in:
parent
452b3c4eb0
commit
eef9712924
10 changed files with 1662 additions and 0 deletions
379
tests/test_builder.py
Normal file
379
tests/test_builder.py
Normal file
|
|
@@ -0,0 +1,379 @@
|
|||
"""Integration tests for bulkgen.builder."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from pathlib import Path
|
||||
from typing import override
|
||||
from unittest.mock import patch
|
||||
|
||||
import pytest
|
||||
|
||||
from bulkgen.builder import (
|
||||
_collect_all_deps, # pyright: ignore[reportPrivateUsage]
|
||||
_collect_dep_files, # pyright: ignore[reportPrivateUsage]
|
||||
_collect_extra_params, # pyright: ignore[reportPrivateUsage]
|
||||
_resolve_prompt, # pyright: ignore[reportPrivateUsage]
|
||||
run_build,
|
||||
)
|
||||
from bulkgen.config import ProjectConfig, TargetConfig, TargetType
|
||||
from bulkgen.providers import Provider
|
||||
from bulkgen.state import load_state
|
||||
from tests.conftest import WriteConfig
|
||||
|
||||
|
||||
class FakeProvider(Provider):
    """Stand-in provider: records each call by writing a marker file.

    No API is contacted; the marker encodes the target name and the
    resolved prompt so tests can inspect what the builder passed in.
    """

    @override
    async def generate(
        self,
        target_name: str,
        target_config: TargetConfig,
        resolved_prompt: str,
        resolved_model: str,
        project_dir: Path,
    ) -> None:
        marker = f"generated:{target_name}:{resolved_prompt}"
        _ = (project_dir / target_name).write_text(marker)
|
||||
|
||||
|
||||
class FailingProvider(Provider):
    """Stand-in provider whose generate call never succeeds."""

    @override
    async def generate(
        self,
        target_name: str,
        target_config: TargetConfig,
        resolved_prompt: str,
        resolved_model: str,
        project_dir: Path,
    ) -> None:
        # Deterministic per-target message so tests can assert on it.
        raise RuntimeError(f"Simulated failure for {target_name}")
|
||||
|
||||
|
||||
def _fake_providers() -> dict[TargetType, Provider]:
    """Map every target type to its own fresh FakeProvider instance."""
    return {kind: FakeProvider() for kind in (TargetType.TEXT, TargetType.IMAGE)}
|
||||
|
||||
|
||||
class TestResolvePrompt:
    """Prompt resolution: existing files are read, everything else is inline."""

    def test_inline_prompt(self, project_dir: Path) -> None:
        resolved = _resolve_prompt("Just a string", project_dir)
        assert resolved == "Just a string"

    def test_file_prompt(self, project_dir: Path, prompt_file: Path) -> None:
        # A prompt naming an existing file is replaced by that file's contents.
        assert (
            _resolve_prompt(prompt_file.name, project_dir)
            == "This prompt comes from a file"
        )

    def test_nonexistent_file_treated_as_inline(self, project_dir: Path) -> None:
        # A file-looking string that doesn't exist on disk is used verbatim.
        assert _resolve_prompt("no_such_file.txt", project_dir) == "no_such_file.txt"
|
||||
|
||||
|
||||
class TestCollectHelpers:
    """Dependency-collection helpers: file deps, extra params, full dep lists."""

    def test_collect_dep_files(
        self, project_dir: Path, write_config: WriteConfig
    ) -> None:
        _ = (project_dir / "input.txt").write_text("data")
        _ = (project_dir / "ref.png").write_bytes(b"ref")
        target = {
            "prompt": "x",
            "inputs": ["input.txt"],
            "reference_image": "ref.png",
            "control_images": [],
        }
        config = write_config({"targets": {"out.png": target}})
        collected = {
            path.name for path in _collect_dep_files("out.png", config, project_dir)
        }
        # Both declared inputs and the reference image count as file deps.
        assert {"input.txt", "ref.png"} <= collected

    def test_collect_extra_params(self, write_config: WriteConfig) -> None:
        target = {
            "prompt": "x",
            "width": 512,
            "height": 256,
            "reference_image": "ref.png",
        }
        config = write_config({"targets": {"out.png": target}})
        params = _collect_extra_params("out.png", config)
        expected = {"width": 512, "height": 256, "reference_image": "ref.png"}
        for key, value in expected.items():
            assert params[key] == value

    def test_collect_extra_params_empty(self, write_config: WriteConfig) -> None:
        # A target with only a prompt contributes no extra params.
        config = write_config({"targets": {"out.txt": {"prompt": "x"}}})
        assert _collect_extra_params("out.txt", config) == {}

    def test_collect_all_deps(self, write_config: WriteConfig) -> None:
        target = {
            "prompt": "x",
            "inputs": ["a.txt"],
            "reference_image": "ref.png",
            "control_images": ["c1.png", "c2.png"],
        }
        config = write_config({"targets": {"out.png": target}})
        # Ordering is significant: inputs, then reference image, then controls.
        assert _collect_all_deps("out.png", config) == [
            "a.txt",
            "ref.png",
            "c1.png",
            "c2.png",
        ]
|
||||
|
||||
|
||||
class TestRunBuild:
    """End-to-end build pipeline tests driven by fake providers."""

    async def test_build_single_text_target(
        self, project_dir: Path, simple_text_config: ProjectConfig
    ) -> None:
        with patch("bulkgen.builder._create_providers", return_value=_fake_providers()):
            outcome = await run_build(simple_text_config, project_dir)

        assert outcome.built == ["output.txt"]
        assert outcome.skipped == []
        assert outcome.failed == {}
        assert (project_dir / "output.txt").exists()

    async def test_build_chain_dependency(
        self, project_dir: Path, multi_target_config: ProjectConfig
    ) -> None:
        with patch("bulkgen.builder._create_providers", return_value=_fake_providers()):
            outcome = await run_build(multi_target_config, project_dir)

        assert outcome.failed == {}
        # Every target in the chain is built and its artifact written.
        for name in ("summary.md", "final.txt", "hero.png"):
            assert name in outcome.built
            assert (project_dir / name).exists()

    async def test_incremental_build_skips_clean_targets(
        self, project_dir: Path, simple_text_config: ProjectConfig
    ) -> None:
        with patch("bulkgen.builder._create_providers", return_value=_fake_providers()):
            first = await run_build(simple_text_config, project_dir)
            assert first.built == ["output.txt"]

            # Nothing changed, so a second pass must do no generation work.
            second = await run_build(simple_text_config, project_dir)
            assert second.skipped == ["output.txt"]
            assert second.built == []

    async def test_rebuild_after_prompt_change(
        self, project_dir: Path, write_config: WriteConfig
    ) -> None:
        initial = write_config({"targets": {"out.txt": {"prompt": "version 1"}}})
        with patch("bulkgen.builder._create_providers", return_value=_fake_providers()):
            first = await run_build(initial, project_dir)
            assert first.built == ["out.txt"]

            # Editing the prompt dirties the target, forcing a rebuild.
            updated = write_config({"targets": {"out.txt": {"prompt": "version 2"}}})
            second = await run_build(updated, project_dir)
            assert second.built == ["out.txt"]

    async def test_rebuild_after_input_change(
        self, project_dir: Path, write_config: WriteConfig
    ) -> None:
        data_file = project_dir / "data.txt"
        _ = data_file.write_text("original")
        config = write_config(
            {"targets": {"out.md": {"prompt": "x", "inputs": ["data.txt"]}}}
        )
        with patch("bulkgen.builder._create_providers", return_value=_fake_providers()):
            first = await run_build(config, project_dir)
            assert first.built == ["out.md"]

            # Touching an input file dirties the dependent target.
            _ = data_file.write_text("modified")
            second = await run_build(config, project_dir)
            assert second.built == ["out.md"]

    async def test_selective_build_single_target(
        self, project_dir: Path, multi_target_config: ProjectConfig
    ) -> None:
        with patch("bulkgen.builder._create_providers", return_value=_fake_providers()):
            outcome = await run_build(
                multi_target_config, project_dir, target="summary.md"
            )

        # Only the requested target (plus its deps) is built; siblings are not.
        assert "summary.md" in outcome.built
        assert "hero.png" not in outcome.built
        assert "final.txt" not in outcome.built

    async def test_selective_build_unknown_target_raises(
        self, project_dir: Path, simple_text_config: ProjectConfig
    ) -> None:
        with (
            patch("bulkgen.builder._create_providers", return_value=_fake_providers()),
            pytest.raises(ValueError, match="Unknown target"),
        ):
            _ = await run_build(
                simple_text_config, project_dir, target="nonexistent.txt"
            )

    async def test_failed_target_isolates_independent(
        self, project_dir: Path, write_config: WriteConfig
    ) -> None:
        config = write_config(
            {
                "targets": {
                    "fail.txt": {"prompt": "will fail"},
                    "ok.txt": {"prompt": "will succeed"},
                }
            }
        )
        failing = FailingProvider()
        succeeding = FakeProvider()

        async def selective_generate(
            target_name: str,
            target_config: TargetConfig,
            resolved_prompt: str,
            resolved_model: str,
            project_dir: Path,
        ) -> None:
            # Route only "fail.txt" through the failing provider.
            chosen = failing if target_name == "fail.txt" else succeeding
            await chosen.generate(
                target_name,
                target_config,
                resolved_prompt,
                resolved_model,
                project_dir,
            )

        router = FakeProvider()
        router.generate = selective_generate  # type: ignore[assignment]

        providers: dict[TargetType, Provider] = {
            TargetType.TEXT: router,
            TargetType.IMAGE: router,
        }

        with patch("bulkgen.builder._create_providers", return_value=providers):
            outcome = await run_build(config, project_dir)

        # The failure must not block the unrelated, independent target.
        assert "fail.txt" in outcome.failed
        assert "ok.txt" in outcome.built

    async def test_failed_dep_cascades(
        self, project_dir: Path, write_config: WriteConfig
    ) -> None:
        config = write_config(
            {
                "targets": {
                    "base.txt": {"prompt": "base"},
                    "child.txt": {"prompt": "child", "inputs": ["base.txt"]},
                }
            }
        )

        providers: dict[TargetType, Provider] = {
            TargetType.TEXT: FailingProvider(),
            TargetType.IMAGE: FakeProvider(),
        }
        with patch("bulkgen.builder._create_providers", return_value=providers):
            outcome = await run_build(config, project_dir)

        # A failed dependency marks its dependents failed as well.
        assert "base.txt" in outcome.failed
        assert "child.txt" in outcome.failed
        assert "Dependency failed" in outcome.failed["child.txt"]

    async def test_missing_provider_records_failure(
        self, project_dir: Path, simple_text_config: ProjectConfig
    ) -> None:
        # An empty provider map simulates missing API credentials.
        with patch(
            "bulkgen.builder._create_providers",
            return_value={},
        ):
            outcome = await run_build(simple_text_config, project_dir)

        assert "output.txt" in outcome.failed
        assert "MISTRAL_API_KEY" in outcome.failed["output.txt"]

    async def test_state_saved_after_each_generation(
        self, project_dir: Path, write_config: WriteConfig
    ) -> None:
        config = write_config(
            {
                "targets": {
                    "a.txt": {"prompt": "first"},
                    "b.txt": {"prompt": "second", "inputs": ["a.txt"]},
                }
            }
        )
        with patch("bulkgen.builder._create_providers", return_value=_fake_providers()):
            _ = await run_build(config, project_dir)

        # Reloading from disk shows both targets recorded.
        state = load_state(project_dir)
        for name in ("a.txt", "b.txt"):
            assert name in state.targets

    async def test_prompt_file_resolution_in_build(
        self, project_dir: Path, prompt_file: Path, write_config: WriteConfig
    ) -> None:
        config = write_config({"targets": {"out.txt": {"prompt": prompt_file.name}}})

        with patch("bulkgen.builder._create_providers", return_value=_fake_providers()):
            outcome = await run_build(config, project_dir)

        assert outcome.built == ["out.txt"]
        # FakeProvider embeds the resolved prompt text in the output it writes.
        assert "This prompt comes from a file" in (project_dir / "out.txt").read_text()

    async def test_rebuild_after_output_deleted(
        self, project_dir: Path, simple_text_config: ProjectConfig
    ) -> None:
        with patch("bulkgen.builder._create_providers", return_value=_fake_providers()):
            first = await run_build(simple_text_config, project_dir)
            assert first.built == ["output.txt"]

            # Removing the artifact must mark the target dirty again.
            (project_dir / "output.txt").unlink()
            second = await run_build(simple_text_config, project_dir)
            assert second.built == ["output.txt"]

    async def test_diamond_dependency_all_built(
        self, project_dir: Path, write_config: WriteConfig
    ) -> None:
        _ = (project_dir / "root.txt").write_text("root data")
        config = write_config(
            {
                "targets": {
                    "left.md": {"prompt": "left", "inputs": ["root.txt"]},
                    "right.md": {"prompt": "right", "inputs": ["root.txt"]},
                    "merge.txt": {
                        "prompt": "merge",
                        "inputs": ["left.md", "right.md"],
                    },
                }
            }
        )
        with patch("bulkgen.builder._create_providers", return_value=_fake_providers()):
            outcome = await run_build(config, project_dir)

        assert set(outcome.built) == {"left.md", "right.md", "merge.txt"}
        assert outcome.failed == {}
|
||||
Loading…
Add table
Add a link
Reference in a new issue