Skip to content
This repository was archived by the owner on Mar 26, 2025. It is now read-only.
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 4 additions & 2 deletions conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
import time
from io import BytesIO
from threading import Thread
from typing import AsyncGenerator, Callable, cast
from typing import AsyncGenerator, Callable, Generator, cast

import psutil
import pytest
Expand All @@ -23,7 +23,9 @@ def hatchet() -> Hatchet:


@pytest.fixture()
def worker(request: pytest.FixtureRequest):
def worker(
request: pytest.FixtureRequest,
) -> Generator[subprocess.Popen[bytes], None, None]:
example = cast(str, request.param)

command = ["poetry", "run", example]
Expand Down
28 changes: 13 additions & 15 deletions examples/bulk_fanout/bulk_trigger.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@
from dotenv import load_dotenv

from hatchet_sdk import new_client
from hatchet_sdk.clients.admin import TriggerWorkflowOptions
from hatchet_sdk.clients.admin import TriggerWorkflowOptions, WorkflowRunDict
from hatchet_sdk.clients.rest.models.workflow_run import WorkflowRun
from hatchet_sdk.clients.run_event_listener import StepRunEventType

Expand All @@ -16,22 +16,20 @@ async def main() -> None:
load_dotenv()
hatchet = new_client()

workflowRuns: list[dict[str, Any]] = []

    # We are going to run the BulkParent workflow 20 times, which will trigger the Child workflow n times for each n in range(20)
for i in range(20):
workflowRuns.append(
{
"workflow_name": "BulkParent",
"input": {"n": i},
"options": {
"additional_metadata": {
"bulk-trigger": i,
"hello-{i}": "earth-{i}",
},
},
}
workflowRuns = [
WorkflowRunDict(
workflow_name="BulkParent",
input={"n": i},
options=TriggerWorkflowOptions(
additional_metadata={
"bulk-trigger": str(i),
f"hello-{i}": f"earth-{i}",
}
),
)
for i in range(20)
]

workflowRunRefs = hatchet.admin.run_workflows(
workflowRuns,
Expand Down
5 changes: 3 additions & 2 deletions hatchet_sdk/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -137,8 +137,9 @@
from .clients.run_event_listener import StepRunEventType, WorkflowRunEventType
from .context.context import Context
from .context.worker_context import WorkerContext
from .hatchet import ClientConfig, Hatchet, concurrency, on_failure_step, step, workflow
from .worker import Worker, WorkerStartOptions, WorkerStatus
from .hatchet import Hatchet, concurrency, on_failure_step, step, workflow
from .loader import ClientConfig
from .worker.worker import Worker, WorkerStartOptions, WorkerStatus
from .workflow import ConcurrencyExpression

__all__ = [
Expand Down
10 changes: 5 additions & 5 deletions hatchet_sdk/client.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@ def from_environment(
defaults: ClientConfig = ClientConfig(),
debug: bool = False,
*opts_functions: Callable[[ClientConfig], None],
):
) -> "Client":
try:
loop = asyncio.get_running_loop()
except RuntimeError:
Expand All @@ -48,7 +48,7 @@ def from_config(
cls,
config: ClientConfig = ClientConfig(),
debug: bool = False,
):
) -> "Client":
try:
loop = asyncio.get_running_loop()
except RuntimeError:
Expand Down Expand Up @@ -89,7 +89,7 @@ def __init__(
rest_client: RestApi,
config: ClientConfig,
debug: bool = False,
):
) -> None:
try:
loop = asyncio.get_running_loop()
except RuntimeError:
Expand All @@ -107,8 +107,8 @@ def __init__(
self.debug = debug


def with_host_port(host: str, port: int):
def with_host_port_impl(config: ClientConfig):
def with_host_port(host: str, port: int) -> Callable[[ClientConfig], None]:
def with_host_port_impl(config: ClientConfig) -> None:
config.host = host
config.port = port

Expand Down
Loading
Loading