feat: add OpenAI as provider for text and image generation
- Add openai_text.py: text generation via OpenAI chat completions API (gpt-4o, gpt-4o-mini, gpt-4.1, gpt-4.1-mini, gpt-4.1-nano, o3-mini)
- Add openai_image.py: image generation via OpenAI images API (gpt-image-1 with reference image support, dall-e-3, dall-e-2)
- Refactor builder provider dispatch from TargetType to a model-name index to support multiple providers per target type
- Fix circular import between config.py and providers/__init__.py using a TYPE_CHECKING guard
- Fix stale default model assertions in tests
- Add openai>=1.0.0 dependency
This commit is contained in:
parent
d0dac5b1bf
commit
870023865d
9 changed files with 571 additions and 58 deletions
|
|
@ -9,16 +9,14 @@ from collections.abc import Callable
|
|||
from dataclasses import dataclass, field
|
||||
from pathlib import Path
|
||||
|
||||
from bulkgen.config import (
|
||||
ProjectConfig,
|
||||
TargetType,
|
||||
target_type_from_capabilities,
|
||||
)
|
||||
from bulkgen.config import ProjectConfig
|
||||
from bulkgen.graph import build_graph, get_build_order, get_subgraph_for_target
|
||||
from bulkgen.providers import Provider
|
||||
from bulkgen.providers.blackforest import BlackForestProvider
|
||||
from bulkgen.providers.mistral import MistralProvider
|
||||
from bulkgen.resolve import infer_required_capabilities, resolve_model
|
||||
from bulkgen.providers.openai_image import OpenAIImageProvider
|
||||
from bulkgen.providers.openai_text import OpenAITextProvider
|
||||
from bulkgen.resolve import resolve_model
|
||||
from bulkgen.state import (
|
||||
BuildState,
|
||||
is_target_dirty,
|
||||
|
|
@ -100,32 +98,43 @@ def _collect_all_deps(target_name: str, config: ProjectConfig) -> list[str]:
|
|||
return deps
|
||||
|
||||
|
||||
def _create_providers() -> list[Provider]:
    """Create provider instances from environment variables.

    A provider is instantiated only when its API key is present in the
    environment, so a missing key simply means that provider's models are
    unavailable rather than an error.

    Returns:
        All providers whose API keys are configured (possibly empty).
    """
    providers: list[Provider] = []
    bfl_key = os.environ.get("BFL_API_KEY", "")
    if bfl_key:
        providers.append(BlackForestProvider(api_key=bfl_key))
    mistral_key = os.environ.get("MISTRAL_API_KEY", "")
    if mistral_key:
        providers.append(MistralProvider(api_key=mistral_key))
    openai_key = os.environ.get("OPENAI_API_KEY", "")
    if openai_key:
        # One OpenAI key unlocks both the text and the image provider.
        providers.append(OpenAITextProvider(api_key=openai_key))
        providers.append(OpenAIImageProvider(api_key=openai_key))
    return providers
|
||||
|
||||
|
||||
def _build_provider_index(providers: list[Provider]) -> dict[str, Provider]:
|
||||
"""Build a model-name → provider lookup from a list of providers."""
|
||||
index: dict[str, Provider] = {}
|
||||
for provider in providers:
|
||||
for model in provider.get_provided_models():
|
||||
index[model.name] = provider
|
||||
return index
|
||||
|
||||
|
||||
async def _build_single_target(
|
||||
target_name: str,
|
||||
config: ProjectConfig,
|
||||
project_dir: Path,
|
||||
providers: dict[TargetType, Provider],
|
||||
provider_index: dict[str, Provider],
|
||||
) -> None:
|
||||
"""Build a single target by dispatching to the appropriate provider."""
|
||||
target_cfg = config.targets[target_name]
|
||||
model_info = resolve_model(target_name, target_cfg, config.defaults)
|
||||
required = infer_required_capabilities(target_name, target_cfg)
|
||||
target_type = target_type_from_capabilities(required)
|
||||
resolved_prompt = _resolve_prompt(target_cfg.prompt, project_dir)
|
||||
|
||||
provider = providers[target_type]
|
||||
provider = provider_index[model_info.name]
|
||||
await provider.generate(
|
||||
target_name=target_name,
|
||||
target_config=target_cfg,
|
||||
|
|
@ -152,6 +161,7 @@ async def run_build(
|
|||
"""
|
||||
result = BuildResult()
|
||||
providers = _create_providers()
|
||||
provider_index = _build_provider_index(providers)
|
||||
|
||||
graph = build_graph(config, project_dir)
|
||||
|
||||
|
|
@ -181,7 +191,7 @@ async def run_build(
|
|||
continue
|
||||
|
||||
if _is_dirty(name, config, project_dir, state):
|
||||
if not _has_provider(name, config, providers, result, on_progress):
|
||||
if not _has_provider(name, config, provider_index, result, on_progress):
|
||||
continue
|
||||
dirty_targets.append(name)
|
||||
else:
|
||||
|
|
@ -195,7 +205,7 @@ async def run_build(
|
|||
on_progress(BuildEvent.TARGET_BUILDING, name, "")
|
||||
|
||||
outcomes = await _build_generation(
|
||||
dirty_targets, config, project_dir, providers
|
||||
dirty_targets, config, project_dir, provider_index
|
||||
)
|
||||
|
||||
_process_outcomes(outcomes, config, project_dir, state, result, on_progress)
|
||||
|
|
@ -238,19 +248,15 @@ def _is_dirty(
|
|||
def _has_provider(
|
||||
target_name: str,
|
||||
config: ProjectConfig,
|
||||
providers: dict[TargetType, Provider],
|
||||
provider_index: dict[str, Provider],
|
||||
result: BuildResult,
|
||||
on_progress: ProgressCallback = _noop_callback,
|
||||
) -> bool:
|
||||
"""Check that the required provider is available; record failure if not."""
|
||||
target_cfg = config.targets[target_name]
|
||||
required = infer_required_capabilities(target_name, target_cfg)
|
||||
target_type = target_type_from_capabilities(required)
|
||||
if target_type not in providers:
|
||||
env_var = (
|
||||
"BFL_API_KEY" if target_type is TargetType.IMAGE else "MISTRAL_API_KEY"
|
||||
)
|
||||
msg = f"Missing {env_var} environment variable"
|
||||
model_info = resolve_model(target_name, target_cfg, config.defaults)
|
||||
if model_info.name not in provider_index:
|
||||
msg = f"No provider available for model '{model_info.name}' (provider: {model_info.provider}) — check API key environment variables"
|
||||
result.failed[target_name] = msg
|
||||
on_progress(BuildEvent.TARGET_NO_PROVIDER, target_name, msg)
|
||||
return False
|
||||
|
|
@ -261,13 +267,13 @@ async def _build_generation(
|
|||
dirty_targets: list[str],
|
||||
config: ProjectConfig,
|
||||
project_dir: Path,
|
||||
providers: dict[TargetType, Provider],
|
||||
provider_index: dict[str, Provider],
|
||||
) -> list[tuple[str, Exception | None]]:
|
||||
"""Build all dirty targets in a generation concurrently."""
|
||||
|
||||
async def _build_one(name: str) -> tuple[str, Exception | None]:
|
||||
try:
|
||||
await _build_single_target(name, config, project_dir, providers)
|
||||
await _build_single_target(name, config, project_dir, provider_index)
|
||||
except Exception as exc: # noqa: BLE001
|
||||
return (name, exc)
|
||||
return (name, None)
|
||||
|
|
|
|||
|
|
@ -4,10 +4,13 @@ from __future__ import annotations
|
|||
|
||||
import abc
|
||||
from pathlib import Path
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from bulkgen.config import TargetConfig
|
||||
from bulkgen.providers.models import ModelInfo
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from bulkgen.config import TargetConfig
|
||||
|
||||
|
||||
class Provider(abc.ABC):
|
||||
"""Abstract base for generation providers."""
|
||||
|
|
|
|||
194
bulkgen/providers/openai_image.py
Normal file
194
bulkgen/providers/openai_image.py
Normal file
|
|
@ -0,0 +1,194 @@
|
|||
"""OpenAI image generation provider."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import base64
|
||||
from pathlib import Path
|
||||
from typing import Literal, get_args, override
|
||||
|
||||
import httpx
|
||||
from openai import AsyncOpenAI
|
||||
from openai.types.images_response import ImagesResponse
|
||||
|
||||
from bulkgen.config import TargetConfig
|
||||
from bulkgen.providers import Provider
|
||||
from bulkgen.providers.models import Capability, ModelInfo
|
||||
|
||||
_SIZE = Literal[
|
||||
"auto",
|
||||
"1024x1024",
|
||||
"1024x1536",
|
||||
"1536x1024",
|
||||
"1024x1792",
|
||||
"1792x1024",
|
||||
"256x256",
|
||||
"512x512",
|
||||
]
|
||||
|
||||
_VALID_SIZES: frozenset[str] = frozenset(
|
||||
{
|
||||
"auto",
|
||||
"1024x1024",
|
||||
"1024x1536",
|
||||
"1536x1024",
|
||||
"1024x1792",
|
||||
"1792x1024",
|
||||
"256x256",
|
||||
"512x512",
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
def _build_size(width: int | None, height: int | None) -> _SIZE | None:
|
||||
"""Convert width/height to an OpenAI size string, or *None* for the default."""
|
||||
if width is None and height is None:
|
||||
return None
|
||||
w = width or 1024
|
||||
h = height or 1024
|
||||
size = f"{w}x{h}"
|
||||
if size not in _VALID_SIZES:
|
||||
msg = f"Unsupported OpenAI image size '{size}'. Valid sizes: {', '.join(sorted(_VALID_SIZES))}"
|
||||
raise ValueError(msg)
|
||||
return size # pyright: ignore[reportReturnType]
|
||||
|
||||
|
||||
class OpenAIImageProvider(Provider):
    """Generates images via the OpenAI API."""

    _api_key: str

    def __init__(self, api_key: str) -> None:
        self._api_key = api_key

    @staticmethod
    @override
    def get_provided_models() -> list[ModelInfo]:
        """Return the image models this provider can serve."""
        specs: list[tuple[str, list[Capability]]] = [
            ("gpt-image-1", [Capability.TEXT_TO_IMAGE, Capability.REFERENCE_IMAGES]),
            ("dall-e-3", [Capability.TEXT_TO_IMAGE]),
            ("dall-e-2", [Capability.TEXT_TO_IMAGE]),
        ]
        return [
            ModelInfo(name=name, provider="OpenAI", type="image", capabilities=caps)
            for name, caps in specs
        ]

    @override
    async def generate(
        self,
        target_name: str,
        target_config: TargetConfig,
        resolved_prompt: str,
        resolved_model: ModelInfo,
        project_dir: Path,
    ) -> None:
        """Generate one image for *target_name* and write it under *project_dir*.

        Dispatches to the edits endpoint when reference images are configured,
        otherwise to plain text-to-image generation.
        """
        destination = project_dir / target_name
        size = _build_size(target_config.width, target_config.height)

        async with AsyncOpenAI(api_key=self._api_key) as client:
            if target_config.reference_images:
                response = await _generate_edit(
                    client,
                    resolved_prompt,
                    resolved_model.name,
                    target_config.reference_images,
                    project_dir,
                    size,
                )
            else:
                response = await _generate_new(
                    client,
                    resolved_prompt,
                    resolved_model.name,
                    size,
                )

            payload = _extract_image_bytes(response, resolved_model.name)
            _ = destination.write_bytes(payload)
|
||||
|
||||
|
||||
async def _generate_new(
    client: AsyncOpenAI,
    prompt: str,
    model: str,
    size: _SIZE | None,
) -> ImagesResponse:
    """Generate a new image from a text prompt.

    ``response_format`` is only sent for DALL-E models: gpt-image-1 always
    returns base64 data and the images API rejects the parameter for it.
    ``size`` is omitted entirely when *None* so the API default applies.
    """
    kwargs: dict[str, object] = {"prompt": prompt, "model": model, "n": 1}
    if model.startswith("dall-e"):
        kwargs["response_format"] = "b64_json"
    if size is not None:
        kwargs["size"] = size
    return await client.images.generate(**kwargs)  # pyright: ignore[reportArgumentType]
|
||||
|
||||
|
||||
async def _generate_edit(
    client: AsyncOpenAI,
    prompt: str,
    model: str,
    reference_images: list[str],
    project_dir: Path,
    size: _SIZE | None,
) -> ImagesResponse:
    """Generate an image using a reference image via the edits endpoint.

    Only the first reference image is sent.
    NOTE(review): gpt-image-1 accepts multiple input images — consider
    forwarding all of ``reference_images``.

    ``response_format`` is only sent for DALL-E models: gpt-image-1 always
    returns base64 data and the images API rejects the parameter for it.
    """
    ref_path = project_dir / reference_images[0]
    # Pass (filename, bytes) so the SDK can attach a filename and infer the
    # MIME type; bare bytes upload without either and the API may reject them.
    image = (ref_path.name, ref_path.read_bytes())

    kwargs: dict[str, object] = {
        "image": image,
        "prompt": prompt,
        "model": model,
        "n": 1,
    }
    if model.startswith("dall-e"):
        kwargs["response_format"] = "b64_json"
    if size is not None:
        kwargs["size"] = size
    return await client.images.edit(**kwargs)  # pyright: ignore[reportArgumentType]
|
||||
|
||||
|
||||
def _extract_image_bytes(response: ImagesResponse, model: str) -> bytes:
|
||||
"""Extract image bytes from an OpenAI images response."""
|
||||
if not response.data:
|
||||
msg = f"OpenAI {model} returned no images"
|
||||
raise RuntimeError(msg)
|
||||
|
||||
image = response.data[0]
|
||||
|
||||
if image.b64_json is not None:
|
||||
return base64.b64decode(image.b64_json)
|
||||
|
||||
if image.url is not None:
|
||||
resp = httpx.get(image.url)
|
||||
_ = resp.raise_for_status()
|
||||
return resp.content
|
||||
|
||||
msg = f"OpenAI {model} returned neither b64_json nor url"
|
||||
raise RuntimeError(msg)
|
||||
169
bulkgen/providers/openai_text.py
Normal file
169
bulkgen/providers/openai_text.py
Normal file
|
|
@ -0,0 +1,169 @@
|
|||
"""OpenAI text generation provider."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import base64
|
||||
import mimetypes
|
||||
from pathlib import Path
|
||||
from typing import override
|
||||
|
||||
from openai import AsyncOpenAI
|
||||
from openai.types.chat import (
|
||||
ChatCompletionContentPartImageParam,
|
||||
ChatCompletionContentPartParam,
|
||||
ChatCompletionContentPartTextParam,
|
||||
ChatCompletionUserMessageParam,
|
||||
)
|
||||
|
||||
from bulkgen.config import IMAGE_EXTENSIONS, TargetConfig
|
||||
from bulkgen.providers import Provider
|
||||
from bulkgen.providers.models import Capability, ModelInfo
|
||||
|
||||
|
||||
def _image_to_data_url(path: Path) -> str:
|
||||
"""Read an image file and return a ``data:`` URL with base64-encoded content."""
|
||||
mime = mimetypes.guess_type(path.name)[0] or "image/png"
|
||||
b64 = base64.b64encode(path.read_bytes()).decode("ascii")
|
||||
return f"data:{mime};base64,{b64}"
|
||||
|
||||
|
||||
class OpenAITextProvider(Provider):
    """Generates text via the OpenAI API."""

    _api_key: str

    def __init__(self, api_key: str) -> None:
        self._api_key = api_key

    @staticmethod
    @override
    def get_provided_models() -> list[ModelInfo]:
        """Return the chat models this provider can serve."""
        vision_names = (
            "gpt-4o",
            "gpt-4o-mini",
            "gpt-4.1",
            "gpt-4.1-mini",
            "gpt-4.1-nano",
        )
        models = [
            ModelInfo(
                name=name,
                provider="OpenAI",
                type="text",
                capabilities=[Capability.TEXT_GENERATION, Capability.VISION],
            )
            for name in vision_names
        ]
        models.append(
            ModelInfo(
                name="o3-mini",
                provider="OpenAI",
                type="text",
                capabilities=[Capability.TEXT_GENERATION],
            )
        )
        return models

    @override
    async def generate(
        self,
        target_name: str,
        target_config: TargetConfig,
        resolved_prompt: str,
        resolved_model: ModelInfo,
        project_dir: Path,
    ) -> None:
        """Generate text for *target_name* and write it to the target file.

        Inputs and reference images are inlined into a single user message;
        when any of them is an image file, a multimodal message is built
        instead of a plain-text one.

        Raises:
            RuntimeError: when the API returns no choices or empty content.
        """
        destination = project_dir / target_name

        input_names = list(target_config.inputs) + list(
            target_config.reference_images
        )

        any_image = any(
            (project_dir / name).suffix.lower() in IMAGE_EXTENSIONS
            for name in input_names
        )

        builder = _build_multimodal_message if any_image else _build_text_message
        message = builder(resolved_prompt, input_names, project_dir)

        async with AsyncOpenAI(api_key=self._api_key) as client:
            response = await client.chat.completions.create(
                model=resolved_model.name,
                messages=[message],
            )

        if not response.choices:
            msg = f"OpenAI API returned no choices for target '{target_name}'"
            raise RuntimeError(msg)

        content = response.choices[0].message.content
        if content is None:
            msg = f"OpenAI API returned empty content for target '{target_name}'"
            raise RuntimeError(msg)

        _ = destination.write_text(content)
|
||||
|
||||
|
||||
def _build_text_message(
|
||||
prompt: str,
|
||||
input_names: list[str],
|
||||
project_dir: Path,
|
||||
) -> ChatCompletionUserMessageParam:
|
||||
"""Build a plain-text message (no images)."""
|
||||
parts: list[str] = [prompt]
|
||||
for name in input_names:
|
||||
file_content = (project_dir / name).read_text()
|
||||
parts.append(f"\n--- Contents of {name} ---\n{file_content}")
|
||||
return {"role": "user", "content": "\n".join(parts)}
|
||||
|
||||
|
||||
def _build_multimodal_message(
    prompt: str,
    input_names: list[str],
    project_dir: Path,
) -> ChatCompletionUserMessageParam:
    """Build a multimodal message with text and image parts."""
    parts: list[ChatCompletionContentPartParam] = [
        ChatCompletionContentPartTextParam(type="text", text=prompt),
    ]

    for name in input_names:
        source = project_dir / name
        if source.suffix.lower() in IMAGE_EXTENSIONS:
            # Images are embedded inline as base64 data: URLs.
            part: ChatCompletionContentPartParam = ChatCompletionContentPartImageParam(
                type="image_url",
                image_url={"url": _image_to_data_url(source)},
            )
        else:
            part = ChatCompletionContentPartTextParam(
                type="text",
                text=f"\n--- Contents of {name} ---\n{source.read_text()}",
            )
        parts.append(part)

    return {"role": "user", "content": parts}
|
||||
|
|
@ -9,8 +9,12 @@ def get_all_models() -> list[ModelInfo]:
|
|||
"""Return the merged list of models from all providers."""
|
||||
from bulkgen.providers.blackforest import BlackForestProvider
|
||||
from bulkgen.providers.mistral import MistralProvider
|
||||
from bulkgen.providers.openai_image import OpenAIImageProvider
|
||||
from bulkgen.providers.openai_text import OpenAITextProvider
|
||||
|
||||
return (
|
||||
MistralProvider.get_provided_models()
|
||||
+ BlackForestProvider.get_provided_models()
|
||||
+ OpenAITextProvider.get_provided_models()
|
||||
+ OpenAIImageProvider.get_provided_models()
|
||||
)
|
||||
|
|
|
|||
|
|
@ -9,6 +9,7 @@ dependencies = [
|
|||
"httpx>=0.27.0",
|
||||
"mistralai>=1.0.0",
|
||||
"networkx>=3.6.1",
|
||||
"openai>=1.0.0",
|
||||
"pydantic>=2.12.5",
|
||||
"pyyaml>=6.0",
|
||||
"typer>=0.23.1",
|
||||
|
|
|
|||
|
|
@ -16,21 +16,40 @@ from bulkgen.builder import (
|
|||
_resolve_prompt, # pyright: ignore[reportPrivateUsage]
|
||||
run_build,
|
||||
)
|
||||
from bulkgen.config import ProjectConfig, TargetConfig, TargetType
|
||||
from bulkgen.config import ProjectConfig, TargetConfig
|
||||
from bulkgen.providers import Provider
|
||||
from bulkgen.providers.models import ModelInfo
|
||||
from bulkgen.providers.models import Capability, ModelInfo
|
||||
from bulkgen.state import load_state
|
||||
|
||||
WriteConfig = Callable[[dict[str, object]], ProjectConfig]
|
||||
|
||||
|
||||
class FakeProvider(Provider):
|
||||
"""A provider that writes a marker file instead of calling an API."""
|
||||
_FAKE_TEXT_MODELS = [
|
||||
ModelInfo(
|
||||
name="pixtral-large-latest",
|
||||
provider="Fake",
|
||||
type="text",
|
||||
capabilities=[Capability.TEXT_GENERATION, Capability.VISION],
|
||||
),
|
||||
]
|
||||
|
||||
_FAKE_IMAGE_MODELS = [
|
||||
ModelInfo(
|
||||
name="flux-2-pro",
|
||||
provider="Fake",
|
||||
type="image",
|
||||
capabilities=[Capability.TEXT_TO_IMAGE, Capability.REFERENCE_IMAGES],
|
||||
),
|
||||
]
|
||||
|
||||
|
||||
class FakeTextProvider(Provider):
|
||||
"""A text provider that writes a marker file instead of calling an API."""
|
||||
|
||||
@staticmethod
|
||||
@override
|
||||
def get_provided_models() -> list[ModelInfo]:
|
||||
return []
|
||||
return _FAKE_TEXT_MODELS
|
||||
|
||||
@override
|
||||
async def generate(
|
||||
|
|
@ -45,13 +64,34 @@ class FakeProvider(Provider):
|
|||
_ = output.write_text(f"generated:{target_name}:{resolved_prompt}")
|
||||
|
||||
|
||||
class FailingProvider(Provider):
|
||||
"""A provider that always raises."""
|
||||
class FakeImageProvider(Provider):
|
||||
"""An image provider that writes a marker file instead of calling an API."""
|
||||
|
||||
@staticmethod
|
||||
@override
|
||||
def get_provided_models() -> list[ModelInfo]:
|
||||
return []
|
||||
return _FAKE_IMAGE_MODELS
|
||||
|
||||
@override
|
||||
async def generate(
|
||||
self,
|
||||
target_name: str,
|
||||
target_config: TargetConfig,
|
||||
resolved_prompt: str,
|
||||
resolved_model: ModelInfo,
|
||||
project_dir: Path,
|
||||
) -> None:
|
||||
output = project_dir / target_name
|
||||
_ = output.write_text(f"generated:{target_name}:{resolved_prompt}")
|
||||
|
||||
|
||||
class FailingTextProvider(Provider):
|
||||
"""A text provider that always raises."""
|
||||
|
||||
@staticmethod
|
||||
@override
|
||||
def get_provided_models() -> list[ModelInfo]:
|
||||
return _FAKE_TEXT_MODELS
|
||||
|
||||
@override
|
||||
async def generate(
|
||||
|
|
@ -66,11 +106,8 @@ class FailingProvider(Provider):
|
|||
raise RuntimeError(msg)
|
||||
|
||||
|
||||
def _fake_providers() -> list[Provider]:
    """Return one fake text and one fake image provider for tests."""
    return [FakeTextProvider(), FakeImageProvider()]
|
||||
|
||||
|
||||
class TestResolvePrompt:
|
||||
|
|
@ -251,8 +288,8 @@ class TestRunBuild:
|
|||
}
|
||||
}
|
||||
)
|
||||
fail_provider = FailingProvider()
|
||||
fake_provider = FakeProvider()
|
||||
fail_provider = FailingTextProvider()
|
||||
fake_provider = FakeTextProvider()
|
||||
|
||||
async def selective_generate(
|
||||
target_name: str,
|
||||
|
|
@ -278,15 +315,13 @@ class TestRunBuild:
|
|||
project_dir,
|
||||
)
|
||||
|
||||
routing_provider = FakeProvider()
|
||||
routing_provider = FakeTextProvider()
|
||||
routing_provider.generate = selective_generate # type: ignore[assignment]
|
||||
|
||||
providers_dict: dict[TargetType, Provider] = {
|
||||
TargetType.TEXT: routing_provider,
|
||||
TargetType.IMAGE: routing_provider,
|
||||
}
|
||||
|
||||
with patch("bulkgen.builder._create_providers", return_value=providers_dict):
|
||||
with patch(
|
||||
"bulkgen.builder._create_providers",
|
||||
return_value=[routing_provider, FakeImageProvider()],
|
||||
):
|
||||
result = await run_build(config, project_dir)
|
||||
|
||||
assert "fail.txt" in result.failed
|
||||
|
|
@ -304,11 +339,10 @@ class TestRunBuild:
|
|||
}
|
||||
)
|
||||
|
||||
with patch("bulkgen.builder._create_providers") as mock_cp:
|
||||
mock_cp.return_value = {
|
||||
TargetType.TEXT: FailingProvider(),
|
||||
TargetType.IMAGE: FakeProvider(),
|
||||
}
|
||||
with patch(
|
||||
"bulkgen.builder._create_providers",
|
||||
return_value=[FailingTextProvider(), FakeImageProvider()],
|
||||
):
|
||||
result = await run_build(config, project_dir)
|
||||
|
||||
assert "base.txt" in result.failed
|
||||
|
|
@ -320,12 +354,12 @@ class TestRunBuild:
|
|||
) -> None:
|
||||
with patch(
|
||||
"bulkgen.builder._create_providers",
|
||||
return_value={},
|
||||
return_value=[],
|
||||
):
|
||||
result = await run_build(simple_text_config, project_dir)
|
||||
|
||||
assert "output.txt" in result.failed
|
||||
assert "MISTRAL_API_KEY" in result.failed["output.txt"]
|
||||
assert "No provider available" in result.failed["output.txt"]
|
||||
|
||||
async def test_state_saved_after_each_generation(
|
||||
self, project_dir: Path, write_config: WriteConfig
|
||||
|
|
|
|||
|
|
@ -22,8 +22,8 @@ class TestLoadConfig:
|
|||
|
||||
assert "out.txt" in config.targets
|
||||
assert config.targets["out.txt"].prompt == "hello"
|
||||
assert config.defaults.text_model == "mistral-large-latest"
|
||||
assert config.defaults.image_model == "flux-pro-1.1"
|
||||
assert config.defaults.text_model == "pixtral-large-latest"
|
||||
assert config.defaults.image_model == "flux-2-pro"
|
||||
|
||||
def test_full_config_with_all_fields(self, project_dir: Path) -> None:
|
||||
raw = {
|
||||
|
|
|
|||
102
uv.lock
generated
102
uv.lock
generated
|
|
@ -53,6 +53,7 @@ dependencies = [
|
|||
{ name = "httpx" },
|
||||
{ name = "mistralai" },
|
||||
{ name = "networkx" },
|
||||
{ name = "openai" },
|
||||
{ name = "pydantic" },
|
||||
{ name = "pyyaml" },
|
||||
{ name = "typer" },
|
||||
|
|
@ -72,6 +73,7 @@ requires-dist = [
|
|||
{ name = "httpx", specifier = ">=0.27.0" },
|
||||
{ name = "mistralai", specifier = ">=1.0.0" },
|
||||
{ name = "networkx", specifier = ">=3.6.1" },
|
||||
{ name = "openai", specifier = ">=1.0.0" },
|
||||
{ name = "pydantic", specifier = ">=2.12.5" },
|
||||
{ name = "pyyaml", specifier = ">=6.0" },
|
||||
{ name = "typer", specifier = ">=0.23.1" },
|
||||
|
|
@ -156,6 +158,15 @@ wheels = [
|
|||
{ url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "distro"
|
||||
version = "1.9.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/fc/f8/98eea607f65de6527f8a2e8885fc8015d3e6f5775df186e443e0964a11c3/distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed", size = 60722, upload-time = "2023-12-24T09:54:32.31Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/12/b3/231ffd4ab1fc9d679809f356cebee130ac7daa00d6d6f3206dd4fd137e9e/distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2", size = 20277, upload-time = "2023-12-24T09:54:30.421Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "eval-type-backport"
|
||||
version = "0.3.1"
|
||||
|
|
@ -253,6 +264,57 @@ wheels = [
|
|||
{ url = "https://files.pythonhosted.org/packages/32/4b/b99e37f88336009971405cbb7630610322ed6fbfa31e1d7ab3fbf3049a2d/invoke-2.2.1-py3-none-any.whl", hash = "sha256:2413bc441b376e5cd3f55bb5d364f973ad8bdd7bf87e53c79de3c11bf3feecc8", size = 160287, upload-time = "2025-10-11T00:36:33.703Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "jiter"
|
||||
version = "0.13.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/0d/5e/4ec91646aee381d01cdb9974e30882c9cd3b8c5d1079d6b5ff4af522439a/jiter-0.13.0.tar.gz", hash = "sha256:f2839f9c2c7e2dffc1bc5929a510e14ce0a946be9365fd1219e7ef342dae14f4", size = 164847, upload-time = "2026-02-02T12:37:56.441Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/91/9c/7ee5a6ff4b9991e1a45263bfc46731634c4a2bde27dfda6c8251df2d958c/jiter-0.13.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1f8a55b848cbabf97d861495cd65f1e5c590246fabca8b48e1747c4dfc8f85bf", size = 306897, upload-time = "2026-02-02T12:36:16.748Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/7c/02/be5b870d1d2be5dd6a91bdfb90f248fbb7dcbd21338f092c6b89817c3dbf/jiter-0.13.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f556aa591c00f2c45eb1b89f68f52441a016034d18b65da60e2d2875bbbf344a", size = 317507, upload-time = "2026-02-02T12:36:18.351Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/da/92/b25d2ec333615f5f284f3a4024f7ce68cfa0604c322c6808b2344c7f5d2b/jiter-0.13.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f7e1d61da332ec412350463891923f960c3073cf1aae93b538f0bb4c8cd46efb", size = 350560, upload-time = "2026-02-02T12:36:19.746Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/be/ec/74dcb99fef0aca9fbe56b303bf79f6bd839010cb18ad41000bf6cc71eec0/jiter-0.13.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3097d665a27bc96fd9bbf7f86178037db139f319f785e4757ce7ccbf390db6c2", size = 363232, upload-time = "2026-02-02T12:36:21.243Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1b/37/f17375e0bb2f6a812d4dd92d7616e41917f740f3e71343627da9db2824ce/jiter-0.13.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9d01ecc3a8cbdb6f25a37bd500510550b64ddf9f7d64a107d92f3ccb25035d0f", size = 483727, upload-time = "2026-02-02T12:36:22.688Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/77/d2/a71160a5ae1a1e66c1395b37ef77da67513b0adba73b993a27fbe47eb048/jiter-0.13.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ed9bbc30f5d60a3bdf63ae76beb3f9db280d7f195dfcfa61af792d6ce912d159", size = 370799, upload-time = "2026-02-02T12:36:24.106Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/01/99/ed5e478ff0eb4e8aa5fd998f9d69603c9fd3f32de3bd16c2b1194f68361c/jiter-0.13.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98fbafb6e88256f4454de33c1f40203d09fc33ed19162a68b3b257b29ca7f663", size = 359120, upload-time = "2026-02-02T12:36:25.519Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/16/be/7ffd08203277a813f732ba897352797fa9493faf8dc7995b31f3d9cb9488/jiter-0.13.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5467696f6b827f1116556cb0db620440380434591e93ecee7fd14d1a491b6daa", size = 390664, upload-time = "2026-02-02T12:36:26.866Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d1/84/e0787856196d6d346264d6dcccb01f741e5f0bd014c1d9a2ebe149caf4f3/jiter-0.13.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:2d08c9475d48b92892583df9da592a0e2ac49bcd41fae1fec4f39ba6cf107820", size = 513543, upload-time = "2026-02-02T12:36:28.217Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/65/50/ecbd258181c4313cf79bca6c88fb63207d04d5bf5e4f65174114d072aa55/jiter-0.13.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:aed40e099404721d7fcaf5b89bd3b4568a4666358bcac7b6b15c09fb6252ab68", size = 547262, upload-time = "2026-02-02T12:36:29.678Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/27/da/68f38d12e7111d2016cd198161b36e1f042bd115c169255bcb7ec823a3bf/jiter-0.13.0-cp313-cp313-win32.whl", hash = "sha256:36ebfbcffafb146d0e6ffb3e74d51e03d9c35ce7c625c8066cdbfc7b953bdc72", size = 200630, upload-time = "2026-02-02T12:36:31.808Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/25/65/3bd1a972c9a08ecd22eb3b08a95d1941ebe6938aea620c246cf426ae09c2/jiter-0.13.0-cp313-cp313-win_amd64.whl", hash = "sha256:8d76029f077379374cf0dbc78dbe45b38dec4a2eb78b08b5194ce836b2517afc", size = 202602, upload-time = "2026-02-02T12:36:33.679Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/15/fe/13bd3678a311aa67686bb303654792c48206a112068f8b0b21426eb6851e/jiter-0.13.0-cp313-cp313-win_arm64.whl", hash = "sha256:bb7613e1a427cfcb6ea4544f9ac566b93d5bf67e0d48c787eca673ff9c9dff2b", size = 185939, upload-time = "2026-02-02T12:36:35.065Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/49/19/a929ec002ad3228bc97ca01dbb14f7632fffdc84a95ec92ceaf4145688ae/jiter-0.13.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:fa476ab5dd49f3bf3a168e05f89358c75a17608dbabb080ef65f96b27c19ab10", size = 316616, upload-time = "2026-02-02T12:36:36.579Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/52/56/d19a9a194afa37c1728831e5fb81b7722c3de18a3109e8f282bfc23e587a/jiter-0.13.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ade8cb6ff5632a62b7dbd4757d8c5573f7a2e9ae285d6b5b841707d8363205ef", size = 346850, upload-time = "2026-02-02T12:36:38.058Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/36/4a/94e831c6bf287754a8a019cb966ed39ff8be6ab78cadecf08df3bb02d505/jiter-0.13.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9950290340acc1adaded363edd94baebcee7dabdfa8bee4790794cd5cfad2af6", size = 358551, upload-time = "2026-02-02T12:36:39.417Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a2/ec/a4c72c822695fa80e55d2b4142b73f0012035d9fcf90eccc56bc060db37c/jiter-0.13.0-cp313-cp313t-win_amd64.whl", hash = "sha256:2b4972c6df33731aac0742b64fd0d18e0a69bc7d6e03108ce7d40c85fd9e3e6d", size = 201950, upload-time = "2026-02-02T12:36:40.791Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b6/00/393553ec27b824fbc29047e9c7cd4a3951d7fbe4a76743f17e44034fa4e4/jiter-0.13.0-cp313-cp313t-win_arm64.whl", hash = "sha256:701a1e77d1e593c1b435315ff625fd071f0998c5f02792038a5ca98899261b7d", size = 185852, upload-time = "2026-02-02T12:36:42.077Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6e/f5/f1997e987211f6f9bd71b8083047b316208b4aca0b529bb5f8c96c89ef3e/jiter-0.13.0-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:cc5223ab19fe25e2f0bf2643204ad7318896fe3729bf12fde41b77bfc4fafff0", size = 308804, upload-time = "2026-02-02T12:36:43.496Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/cd/8f/5482a7677731fd44881f0204981ce2d7175db271f82cba2085dd2212e095/jiter-0.13.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:9776ebe51713acf438fd9b4405fcd86893ae5d03487546dae7f34993217f8a91", size = 318787, upload-time = "2026-02-02T12:36:45.071Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f3/b9/7257ac59778f1cd025b26a23c5520a36a424f7f1b068f2442a5b499b7464/jiter-0.13.0-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:879e768938e7b49b5e90b7e3fecc0dbec01b8cb89595861fb39a8967c5220d09", size = 353880, upload-time = "2026-02-02T12:36:47.365Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c3/87/719eec4a3f0841dad99e3d3604ee4cba36af4419a76f3cb0b8e2e691ad67/jiter-0.13.0-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:682161a67adea11e3aae9038c06c8b4a9a71023228767477d683f69903ebc607", size = 366702, upload-time = "2026-02-02T12:36:48.871Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d2/65/415f0a75cf6921e43365a1bc227c565cb949caca8b7532776e430cbaa530/jiter-0.13.0-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a13b68cd1cd8cc9de8f244ebae18ccb3e4067ad205220ef324c39181e23bbf66", size = 486319, upload-time = "2026-02-02T12:36:53.006Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/54/a2/9e12b48e82c6bbc6081fd81abf915e1443add1b13d8fc586e1d90bb02bb8/jiter-0.13.0-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:87ce0f14c6c08892b610686ae8be350bf368467b6acd5085a5b65441e2bf36d2", size = 372289, upload-time = "2026-02-02T12:36:54.593Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/4e/c1/e4693f107a1789a239c759a432e9afc592366f04e901470c2af89cfd28e1/jiter-0.13.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c365005b05505a90d1c47856420980d0237adf82f70c4aff7aebd3c1cc143ad", size = 360165, upload-time = "2026-02-02T12:36:56.112Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/17/08/91b9ea976c1c758240614bd88442681a87672eebc3d9a6dde476874e706b/jiter-0.13.0-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1317fdffd16f5873e46ce27d0e0f7f4f90f0cdf1d86bf6abeaea9f63ca2c401d", size = 389634, upload-time = "2026-02-02T12:36:57.495Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/18/23/58325ef99390d6d40427ed6005bf1ad54f2577866594bcf13ce55675f87d/jiter-0.13.0-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:c05b450d37ba0c9e21c77fef1f205f56bcee2330bddca68d344baebfc55ae0df", size = 514933, upload-time = "2026-02-02T12:36:58.909Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5b/25/69f1120c7c395fd276c3996bb8adefa9c6b84c12bb7111e5c6ccdcd8526d/jiter-0.13.0-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:775e10de3849d0631a97c603f996f518159272db00fdda0a780f81752255ee9d", size = 548842, upload-time = "2026-02-02T12:37:00.433Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/18/05/981c9669d86850c5fbb0d9e62bba144787f9fba84546ba43d624ee27ef29/jiter-0.13.0-cp314-cp314-win32.whl", hash = "sha256:632bf7c1d28421c00dd8bbb8a3bac5663e1f57d5cd5ed962bce3c73bf62608e6", size = 202108, upload-time = "2026-02-02T12:37:01.718Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8d/96/cdcf54dd0b0341db7d25413229888a346c7130bd20820530905fdb65727b/jiter-0.13.0-cp314-cp314-win_amd64.whl", hash = "sha256:f22ef501c3f87ede88f23f9b11e608581c14f04db59b6a801f354397ae13739f", size = 204027, upload-time = "2026-02-02T12:37:03.075Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/fb/f9/724bcaaab7a3cd727031fe4f6995cb86c4bd344909177c186699c8dec51a/jiter-0.13.0-cp314-cp314-win_arm64.whl", hash = "sha256:07b75fe09a4ee8e0c606200622e571e44943f47254f95e2436c8bdcaceb36d7d", size = 187199, upload-time = "2026-02-02T12:37:04.414Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/62/92/1661d8b9fd6a3d7a2d89831db26fe3c1509a287d83ad7838831c7b7a5c7e/jiter-0.13.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:964538479359059a35fb400e769295d4b315ae61e4105396d355a12f7fef09f0", size = 318423, upload-time = "2026-02-02T12:37:05.806Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/4f/3b/f77d342a54d4ebcd128e520fc58ec2f5b30a423b0fd26acdfc0c6fef8e26/jiter-0.13.0-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e104da1db1c0991b3eaed391ccd650ae8d947eab1480c733e5a3fb28d4313e40", size = 351438, upload-time = "2026-02-02T12:37:07.189Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/76/b3/ba9a69f0e4209bd3331470c723c2f5509e6f0482e416b612431a5061ed71/jiter-0.13.0-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0e3a5f0cde8ff433b8e88e41aa40131455420fb3649a3c7abdda6145f8cb7202", size = 364774, upload-time = "2026-02-02T12:37:08.579Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b3/16/6cdb31fa342932602458dbb631bfbd47f601e03d2e4950740e0b2100b570/jiter-0.13.0-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:57aab48f40be1db920a582b30b116fe2435d184f77f0e4226f546794cedd9cf0", size = 487238, upload-time = "2026-02-02T12:37:10.066Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ed/b1/956cc7abaca8d95c13aa8d6c9b3f3797241c246cd6e792934cc4c8b250d2/jiter-0.13.0-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7772115877c53f62beeb8fd853cab692dbc04374ef623b30f997959a4c0e7e95", size = 372892, upload-time = "2026-02-02T12:37:11.656Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/26/c4/97ecde8b1e74f67b8598c57c6fccf6df86ea7861ed29da84629cdbba76c4/jiter-0.13.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1211427574b17b633cfceba5040de8081e5abf114f7a7602f73d2e16f9fdaa59", size = 360309, upload-time = "2026-02-02T12:37:13.244Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/4b/d7/eabe3cf46715854ccc80be2cd78dd4c36aedeb30751dbf85a1d08c14373c/jiter-0.13.0-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7beae3a3d3b5212d3a55d2961db3c292e02e302feb43fce6a3f7a31b90ea6dfe", size = 389607, upload-time = "2026-02-02T12:37:14.881Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/df/2d/03963fc0804e6109b82decfb9974eb92df3797fe7222428cae12f8ccaa0c/jiter-0.13.0-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:e5562a0f0e90a6223b704163ea28e831bd3a9faa3512a711f031611e6b06c939", size = 514986, upload-time = "2026-02-02T12:37:16.326Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f6/6c/8c83b45eb3eb1c1e18d841fe30b4b5bc5619d781267ca9bc03e005d8fd0a/jiter-0.13.0-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:6c26a424569a59140fb51160a56df13f438a2b0967365e987889186d5fc2f6f9", size = 548756, upload-time = "2026-02-02T12:37:17.736Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/47/66/eea81dfff765ed66c68fd2ed8c96245109e13c896c2a5015c7839c92367e/jiter-0.13.0-cp314-cp314t-win32.whl", hash = "sha256:24dc96eca9f84da4131cdf87a95e6ce36765c3b156fc9ae33280873b1c32d5f6", size = 201196, upload-time = "2026-02-02T12:37:19.101Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ff/32/4ac9c7a76402f8f00d00842a7f6b83b284d0cf7c1e9d4227bc95aa6d17fa/jiter-0.13.0-cp314-cp314t-win_amd64.whl", hash = "sha256:0a8d76c7524087272c8ae913f5d9d608bd839154b62c4322ef65723d2e5bb0b8", size = 204215, upload-time = "2026-02-02T12:37:20.495Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f9/8e/7def204fea9f9be8b3c21a6f2dd6c020cf56c7d5ff753e0e23ed7f9ea57e/jiter-0.13.0-cp314-cp314t-win_arm64.whl", hash = "sha256:2c26cf47e2cad140fa23b6d58d435a7c0161f5c514284802f25e87fddfe11024", size = 187152, upload-time = "2026-02-02T12:37:22.124Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "markdown-it-py"
|
||||
version = "4.0.0"
|
||||
|
|
@ -320,6 +382,25 @@ wheels = [
|
|||
{ url = "https://files.pythonhosted.org/packages/ab/c4/7532325f968ecfc078e8a028e69a52e4c3f95fb800906bf6931ac1e89e2b/nodejs_wheel_binaries-24.13.1-py2.py3-none-win_arm64.whl", hash = "sha256:caec398cb9e94c560bacdcba56b3828df22a355749eb291f47431af88cbf26dc", size = 38881194, upload-time = "2026-02-12T17:31:00.214Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "openai"
|
||||
version = "2.21.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "anyio" },
|
||||
{ name = "distro" },
|
||||
{ name = "httpx" },
|
||||
{ name = "jiter" },
|
||||
{ name = "pydantic" },
|
||||
{ name = "sniffio" },
|
||||
{ name = "tqdm" },
|
||||
{ name = "typing-extensions" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/92/e5/3d197a0947a166649f566706d7a4c8f7fe38f1fa7b24c9bcffe4c7591d44/openai-2.21.0.tar.gz", hash = "sha256:81b48ce4b8bbb2cc3af02047ceb19561f7b1dc0d4e52d1de7f02abfd15aa59b7", size = 644374, upload-time = "2026-02-14T00:12:01.577Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/cc/56/0a89092a453bb2c676d66abee44f863e742b2110d4dbb1dbcca3f7e5fc33/openai-2.21.0-py3-none-any.whl", hash = "sha256:0bc1c775e5b1536c294eded39ee08f8407656537ccc71b1004104fe1602e267c", size = 1103065, upload-time = "2026-02-14T00:11:59.603Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "opentelemetry-api"
|
||||
version = "1.39.1"
|
||||
|
|
@ -659,6 +740,27 @@ wheels = [
|
|||
{ url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050, upload-time = "2024-12-04T17:35:26.475Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "sniffio"
|
||||
version = "1.3.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372, upload-time = "2024-02-25T23:20:04.057Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235, upload-time = "2024-02-25T23:20:01.196Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tqdm"
|
||||
version = "4.67.3"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "colorama", marker = "sys_platform == 'win32'" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/09/a9/6ba95a270c6f1fbcd8dac228323f2777d886cb206987444e4bce66338dd4/tqdm-4.67.3.tar.gz", hash = "sha256:7d825f03f89244ef73f1d4ce193cb1774a8179fd96f31d7e1dcde62092b960bb", size = 169598, upload-time = "2026-02-03T17:35:53.048Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/16/e1/3079a9ff9b8e11b846c6ac5c8b5bfb7ff225eee721825310c91b3b50304f/tqdm-4.67.3-py3-none-any.whl", hash = "sha256:ee1e4c0e59148062281c49d80b25b67771a127c85fc9676d3be5f243206826bf", size = 78374, upload-time = "2026-02-03T17:35:50.982Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "typer"
|
||||
version = "0.23.1"
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue