diff --git a/.cursor/rules/docs-snippet-workflow.mdc b/.cursor/rules/docs-snippet-workflow.mdc new file mode 100644 index 0000000000..b32472acf3 --- /dev/null +++ b/.cursor/rules/docs-snippet-workflow.mdc @@ -0,0 +1,33 @@ +--- +description: How to edit guide code examples and regenerate snippets +globs: sdks/guides/**/*,frontend/docs/pages/guides/**/*.mdx,examples/*/guides/**/* +alwaysApply: false +--- + +# Guide Snippet Workflow + +## Source of truth + +All guide code lives in `sdks/guides/{lang}/`. The files under `examples/{lang}/guides/` are **generated** mirrors. Never edit the `examples/` copies directly. + +## Snippet markers + +Use `# > Step Title` (or `// >` for TS/Go) to open a snippet and `# !!` (or `// !!`) to close it. The generator converts the title to snake_case for the snippet key (e.g. `# > Step 04 Rate Limited Scrape` → `step_04_rate_limited_scrape`). + +## Referencing snippets in MDX + +``` +snippets.{lang}.guides.{guide_dir}.{file_stem}.{snippet_key} +``` + +Example: `snippets.python.guides.web_scraping.worker.step_01_define_scrape_task` + +## Regenerating + +After editing any file in `sdks/guides/`, run: + +``` +cd frontend/snippets && python3 generate.py +``` + +This regenerates both `examples/*/guides/` mirror files and `frontend/docs/lib/generated/snippets/index.ts`. diff --git a/.cursor/rules/docs-writing-style.mdc b/.cursor/rules/docs-writing-style.mdc new file mode 100644 index 0000000000..2e4cb9fa54 --- /dev/null +++ b/.cursor/rules/docs-writing-style.mdc @@ -0,0 +1,22 @@ +--- +description: Writing style conventions for Hatchet documentation +globs: frontend/docs/**/*.mdx +alwaysApply: false +--- + +# Docs Writing Style + +## Punctuation + +- Do NOT use em dashes (—). Use commas, parentheses, or separate sentences instead. +- Prefer short, direct sentences over long compound ones. + +## Phrases to Avoid + +- Do not use "under the hood." Just state the fact directly. 
+ +## Terminology + +- Do not use the word "cycle" to describe agent loops. Use "child spawning" or "self-spawning child task" instead. +- Use "durable task" (not "durable function" or "durable workflow step") for the core execution primitive. +- Link concept terms to their concept pages on first use (e.g. `[durable task](/concepts/durable-workflows/durable-task-execution)`). diff --git a/.golangci.yml b/.golangci.yml index 51be2b9267..55c391f263 100644 --- a/.golangci.yml +++ b/.golangci.yml @@ -55,7 +55,7 @@ linters: paths: - third_party$ - builtin$ - - examples$ + - ^examples/ - '(.+)_test\.go' - "cmd/hatchet-loadtest/rampup/(.+).go" formatters: @@ -71,4 +71,4 @@ formatters: paths: - third_party$ - builtin$ - - examples$ + - ^examples/ diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 9dd86b7139..349847294f 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -17,5 +17,5 @@ repos: rev: v2.7.2 hooks: - id: golangci-lint - args: ["--config=.golangci.yml"] - exclude: ^examples/ + args: ["--config=.golangci.yml", "--allow-parallel-runners"] + exclude: ^(examples/|sdks/guides/go/) diff --git a/Taskfile.yaml b/Taskfile.yaml index dfa098f374..c3d9d94e00 100644 --- a/Taskfile.yaml +++ b/Taskfile.yaml @@ -212,10 +212,26 @@ tasks: lint: cmds: - task: lint-go + - task: lint-go-guides + - task: lint-python-guides + - task: lint-typescript-guides + - task: lint-ruby-guides - task: lint-app - task: lint-docs lint-go: cmd: golangci-lint run ./... --config .golangci.yml + lint-go-guides: + dir: sdks/guides/go + cmd: golangci-lint run ./... --config .golangci.yml + lint-python-guides: + dir: sdks/guides/python + cmd: poetry run ruff check . 
+ lint-typescript-guides: + dir: sdks/guides/typescript + cmd: pnpm install && pnpm run lint:check + lint-ruby-guides: + dir: sdks/guides/ruby + cmd: bundle install && bundle exec rubocop lint-app: dir: frontend/app cmd: pnpm run lint:check diff --git a/cmd/hatchet-cli/cli/templates/typescript/bun/package.json b/cmd/hatchet-cli/cli/templates/typescript/bun/package.json index a24178e3b3..166c191f92 100644 --- a/cmd/hatchet-cli/cli/templates/typescript/bun/package.json +++ b/cmd/hatchet-cli/cli/templates/typescript/bun/package.json @@ -16,6 +16,6 @@ "typescript": "^5.9.3" }, "dependencies": { - "@hatchet-dev/typescript-sdk": "^1.10.3" + "@hatchet-dev/typescript-sdk": "1.10.3" } } diff --git a/cmd/hatchet-cli/cli/templates/typescript/npm/package.json b/cmd/hatchet-cli/cli/templates/typescript/npm/package.json index e2e89e3f22..f7f087c1cb 100644 --- a/cmd/hatchet-cli/cli/templates/typescript/npm/package.json +++ b/cmd/hatchet-cli/cli/templates/typescript/npm/package.json @@ -16,6 +16,6 @@ "typescript": "^5.9.3" }, "dependencies": { - "@hatchet-dev/typescript-sdk": "^1.10.3" + "@hatchet-dev/typescript-sdk": "1.10.3" } } diff --git a/cmd/hatchet-cli/cli/templates/typescript/pnpm/package.json b/cmd/hatchet-cli/cli/templates/typescript/pnpm/package.json index 12636c547e..9631d8d9d6 100644 --- a/cmd/hatchet-cli/cli/templates/typescript/pnpm/package.json +++ b/cmd/hatchet-cli/cli/templates/typescript/pnpm/package.json @@ -16,7 +16,7 @@ "typescript": "^5.9.3" }, "dependencies": { - "@hatchet-dev/typescript-sdk": "^1.10.3" + "@hatchet-dev/typescript-sdk": "1.10.3" }, "pnpm": { "overrides": { diff --git a/cmd/hatchet-cli/cli/templates/typescript/pnpm/pnpm-lock.yaml b/cmd/hatchet-cli/cli/templates/typescript/pnpm/pnpm-lock.yaml index 3d9d13809a..1f4e2772a6 100644 --- a/cmd/hatchet-cli/cli/templates/typescript/pnpm/pnpm-lock.yaml +++ b/cmd/hatchet-cli/cli/templates/typescript/pnpm/pnpm-lock.yaml @@ -12,7 +12,7 @@ importers: .: dependencies: '@hatchet-dev/typescript-sdk': - 
specifier: ^1.10.3 + specifier: 1.10.3 version: 1.10.3 devDependencies: '@types/node': diff --git a/cmd/hatchet-cli/cli/templates/typescript/yarn/package.json b/cmd/hatchet-cli/cli/templates/typescript/yarn/package.json index e2e89e3f22..f7f087c1cb 100644 --- a/cmd/hatchet-cli/cli/templates/typescript/yarn/package.json +++ b/cmd/hatchet-cli/cli/templates/typescript/yarn/package.json @@ -16,6 +16,6 @@ "typescript": "^5.9.3" }, "dependencies": { - "@hatchet-dev/typescript-sdk": "^1.10.3" + "@hatchet-dev/typescript-sdk": "1.10.3" } } diff --git a/examples/go/guides/ai-agents/main.go b/examples/go/guides/ai-agents/main.go new file mode 100644 index 0000000000..381bc7d9cd --- /dev/null +++ b/examples/go/guides/ai-agents/main.go @@ -0,0 +1,71 @@ +package main + +import ( + "log" + + "github.com/hatchet-dev/hatchet/pkg/cmdutils" + hatchet "github.com/hatchet-dev/hatchet/sdks/go" +) + +func main() { + client, err := hatchet.NewClient() + if err != nil { + log.Fatalf("failed to create hatchet client: %v", err) + } + + // > Step 02 Reasoning Loop + agentReasoningLoop := func(query string) (map[string]interface{}, error) { + messages := []map[string]interface{}{{"role": "user", "content": query}} + for i := 0; i < 10; i++ { + resp := CallLLM(messages) + if resp.Done { + return map[string]interface{}{"response": resp.Content}, nil + } + for _, tc := range resp.ToolCalls { + args := make(map[string]interface{}) + for k, v := range tc.Args { + args[k] = v + } + result := RunTool(tc.Name, args) + messages = append(messages, map[string]interface{}{"role": "tool", "content": result}) + } + } + return map[string]interface{}{"response": "Max iterations reached"}, nil + } + + // > Step 01 Define Agent Task + agentTask := client.NewStandaloneDurableTask("reasoning-loop-agent", func(ctx hatchet.DurableContext, input map[string]interface{}) (map[string]interface{}, error) { + query := "Hello" + if q, ok := input["query"].(string); ok && q != "" { + query = q + } + return 
agentReasoningLoop(query) + }) + + // > Step 03 Stream Response + streamingTask := client.NewStandaloneDurableTask("streaming-agent-task", func(ctx hatchet.DurableContext, input map[string]interface{}) (map[string]interface{}, error) { + tokens := []string{"Hello", " ", "world", "!"} + for _, t := range tokens { + ctx.PutStream(t) + } + return map[string]interface{}{"done": true}, nil + }) + + // > Step 04 Run Worker + worker, err := client.NewWorker("agent-worker", + hatchet.WithWorkflows(agentTask, streamingTask), + hatchet.WithSlots(5), + hatchet.WithDurableSlots(5), + ) + if err != nil { + log.Fatalf("failed to create worker: %v", err) + } + + interruptCtx, cancel := cmdutils.NewInterruptContext() + defer cancel() + + if err := worker.StartBlocking(interruptCtx); err != nil { + cancel() + log.Fatalf("failed to start worker: %v", err) + } +} diff --git a/examples/go/guides/ai-agents/mock_agent.go b/examples/go/guides/ai-agents/mock_agent.go new file mode 100644 index 0000000000..c80da2cc1b --- /dev/null +++ b/examples/go/guides/ai-agents/mock_agent.go @@ -0,0 +1,40 @@ +package main + +// CallLLM is a mock - no external LLM API. +// First call returns tool_calls; second returns final answer. +var llmCallCount int + +type LLMResponse struct { + Content string + ToolCalls []ToolCall + Done bool +} + +type ToolCall struct { + Name string + Args map[string]interface{} +} + +func CallLLM(messages []map[string]interface{}) LLMResponse { + llmCallCount++ + if llmCallCount == 1 { + return LLMResponse{ + Content: "", + ToolCalls: []ToolCall{{Name: "get_weather", Args: map[string]interface{}{"location": "SF"}}}, + Done: false, + } + } + return LLMResponse{Content: "It's 72°F and sunny in SF.", ToolCalls: nil, Done: true} +} + +// RunTool is a mock - returns canned results. 
+func RunTool(name string, args map[string]interface{}) string { + if name == "get_weather" { + loc := "unknown" + if v, ok := args["location"]; ok { + loc = v.(string) + } + return "Weather in " + loc + ": 72°F, sunny" + } + return "Unknown tool: " + name +} diff --git a/examples/go/guides/batch-processing/main.go b/examples/go/guides/batch-processing/main.go new file mode 100644 index 0000000000..0e44c27246 --- /dev/null +++ b/examples/go/guides/batch-processing/main.go @@ -0,0 +1,67 @@ +package main + +import ( + "log" + + "github.com/hatchet-dev/hatchet/pkg/cmdutils" + hatchet "github.com/hatchet-dev/hatchet/sdks/go" +) + +type BatchInput struct { + Items []string `json:"items"` +} + +type ItemInput struct { + ItemID string `json:"item_id"` +} + +func main() { + client, err := hatchet.NewClient() + if err != nil { + log.Fatalf("failed to create hatchet client: %v", err) + } + + // > Step 03 Process Item + childTask := client.NewStandaloneTask("process-item", func(ctx hatchet.Context, input ItemInput) (map[string]string, error) { + return map[string]string{"status": "done", "item_id": input.ItemID}, nil + }) + + // > Step 01 Define Parent Task + parentTask := client.NewStandaloneDurableTask("spawn-children", func(ctx hatchet.DurableContext, input BatchInput) (map[string]interface{}, error) { + inputs := make([]hatchet.RunManyOpt, len(input.Items)) + for i, itemID := range input.Items { + inputs[i] = hatchet.RunManyOpt{Input: ItemInput{ItemID: itemID}} + } + runRefs, err := childTask.RunMany(ctx, inputs) + if err != nil { + return nil, err + } + results := make([]interface{}, len(runRefs)) + for i, ref := range runRefs { + result, err := ref.Result() + if err != nil { + return nil, err + } + var parsed map[string]interface{} + if err := result.TaskOutput("process-item").Into(&parsed); err != nil { + return nil, err + } + results[i] = parsed + } + return map[string]interface{}{"processed": len(results), "results": results}, nil + }) + + // > Step 04 Run Worker + 
worker, err := client.NewWorker("batch-worker", hatchet.WithWorkflows(parentTask, childTask), hatchet.WithSlots(20)) + if err != nil { + log.Fatalf("failed to create worker: %v", err) + } + + interruptCtx, cancel := cmdutils.NewInterruptContext() + defer cancel() + + if err := worker.StartBlocking(interruptCtx); err != nil { + cancel() + log.Fatalf("failed to start worker: %v", err) + } +} diff --git a/examples/go/guides/document-processing/main.go b/examples/go/guides/document-processing/main.go new file mode 100644 index 0000000000..92bcfebe79 --- /dev/null +++ b/examples/go/guides/document-processing/main.go @@ -0,0 +1,63 @@ +package main + +import ( + "log" + + "github.com/hatchet-dev/hatchet/pkg/cmdutils" + hatchet "github.com/hatchet-dev/hatchet/sdks/go" +) + +type DocInput struct { + DocID string `json:"doc_id"` + Content []byte `json:"content"` +} + +func main() { + client, err := hatchet.NewClient() + if err != nil { + log.Fatalf("failed to create hatchet client: %v", err) + } + + // > Step 01 Define DAG + workflow := client.NewWorkflow("DocumentPipeline") + + // > Step 02 Parse Stage + ingest := workflow.NewTask("ingest", func(ctx hatchet.Context, input DocInput) (map[string]interface{}, error) { + return map[string]interface{}{"doc_id": input.DocID, "content": input.Content}, nil + }) + + parse := workflow.NewTask("parse", func(ctx hatchet.Context, input DocInput) (map[string]interface{}, error) { + var ingested map[string]interface{} + if err := ctx.ParentOutput(ingest, &ingested); err != nil { + return nil, err + } + content := ingested["content"].([]byte) + text := parseDocument(content) + return map[string]interface{}{"doc_id": input.DocID, "text": text}, nil + }, hatchet.WithParents(ingest)) + + // > Step 03 Extract Stage + extract := workflow.NewTask("extract", func(ctx hatchet.Context, input DocInput) (map[string]interface{}, error) { + var parsed map[string]interface{} + if err := ctx.ParentOutput(parse, &parsed); err != nil { + return nil, err + 
} + return map[string]interface{}{"doc_id": parsed["doc_id"], "entities": []string{"entity1", "entity2"}}, nil + }, hatchet.WithParents(parse)) + + _ = extract + + // > Step 04 Run Worker + worker, err := client.NewWorker("document-worker", hatchet.WithWorkflows(workflow)) + if err != nil { + log.Fatalf("failed to create worker: %v", err) + } + + interruptCtx, cancel := cmdutils.NewInterruptContext() + defer cancel() + + if err := worker.StartBlocking(interruptCtx); err != nil { + cancel() + log.Fatalf("failed to start worker: %v", err) + } +} diff --git a/examples/go/guides/document-processing/mock_ocr.go b/examples/go/guides/document-processing/mock_ocr.go new file mode 100644 index 0000000000..f447473ab8 --- /dev/null +++ b/examples/go/guides/document-processing/mock_ocr.go @@ -0,0 +1,8 @@ +package main + +import "fmt" + +// parseDocument is a mock - no external OCR dependency. +func parseDocument(content []byte) string { + return fmt.Sprintf("Parsed text from %d bytes", len(content)) +} diff --git a/examples/go/guides/evaluator-optimizer/main.go b/examples/go/guides/evaluator-optimizer/main.go new file mode 100644 index 0000000000..7fd76f3e1c --- /dev/null +++ b/examples/go/guides/evaluator-optimizer/main.go @@ -0,0 +1,108 @@ +package main + +import ( + "fmt" + "log" + + "github.com/hatchet-dev/hatchet/pkg/cmdutils" + hatchet "github.com/hatchet-dev/hatchet/sdks/go" +) + +type GeneratorInput struct { + Topic string `json:"topic"` + Audience string `json:"audience"` + PreviousDraft *string `json:"previous_draft,omitempty"` + Feedback *string `json:"feedback,omitempty"` +} + +type EvaluatorInput struct { + Draft string `json:"draft"` + Topic string `json:"topic"` + Audience string `json:"audience"` +} + +func main() { + client, err := hatchet.NewClient() + if err != nil { + log.Fatalf("failed to create hatchet client: %v", err) + } + + // > Step 01 Define Tasks + generatorTask := client.NewStandaloneTask("generate-draft", func(ctx hatchet.Context, input 
GeneratorInput) (map[string]interface{}, error) { + var prompt string + if input.Feedback != nil { + prompt = fmt.Sprintf("Improve this draft.\n\nDraft: %s\nFeedback: %s", *input.PreviousDraft, *input.Feedback) + } else { + prompt = fmt.Sprintf("Write a social media post about \"%s\" for %s. Under 100 words.", input.Topic, input.Audience) + } + return map[string]interface{}{"draft": MockGenerate(prompt)}, nil + }) + + evaluatorTask := client.NewStandaloneTask("evaluate-draft", func(ctx hatchet.Context, input EvaluatorInput) (map[string]interface{}, error) { + result := MockEvaluate(input.Draft) + return map[string]interface{}{"score": result.Score, "feedback": result.Feedback}, nil + }) + + // > Step 02 Optimization Loop + optimizerTask := client.NewStandaloneDurableTask("evaluator-optimizer", func(ctx hatchet.DurableContext, input map[string]interface{}) (map[string]interface{}, error) { + maxIterations := 3 + threshold := 0.8 + draft := "" + feedback := "" + topic := input["topic"].(string) + audience := input["audience"].(string) + + for i := 0; i < maxIterations; i++ { + genInput := GeneratorInput{Topic: topic, Audience: audience} + if draft != "" { + genInput.PreviousDraft = &draft + } + if feedback != "" { + genInput.Feedback = &feedback + } + genResult, err := generatorTask.Run(ctx, genInput) + if err != nil { + return nil, err + } + var genData map[string]interface{} + if err := genResult.Into(&genData); err != nil { + return nil, err + } + draft = genData["draft"].(string) + + evalResult, err := evaluatorTask.Run(ctx, EvaluatorInput{Draft: draft, Topic: topic, Audience: audience}) + if err != nil { + return nil, err + } + var evalData map[string]interface{} + if err := evalResult.Into(&evalData); err != nil { + return nil, err + } + + score := evalData["score"].(float64) + if score >= threshold { + return map[string]interface{}{"draft": draft, "iterations": i + 1, "score": score}, nil + } + feedback = evalData["feedback"].(string) + } + + return 
map[string]interface{}{"draft": draft, "iterations": maxIterations, "score": -1}, nil + }) + + // > Step 03 Run Worker + worker, err := client.NewWorker("evaluator-optimizer-worker", + hatchet.WithWorkflows(generatorTask, evaluatorTask, optimizerTask), + hatchet.WithSlots(5), + hatchet.WithDurableSlots(5), + ) + if err != nil { + log.Fatalf("failed to create worker: %v", err) + } + + interruptCtx, cancel := cmdutils.NewInterruptContext() + defer cancel() + + if err := worker.StartBlocking(interruptCtx); err != nil { + log.Fatalf("failed to start worker: %v", err) + } +} diff --git a/examples/go/guides/evaluator-optimizer/mock_llm.go b/examples/go/guides/evaluator-optimizer/mock_llm.go new file mode 100644 index 0000000000..92028718d4 --- /dev/null +++ b/examples/go/guides/evaluator-optimizer/mock_llm.go @@ -0,0 +1,23 @@ +package main + +var generateCount int + +func MockGenerate(prompt string) string { + generateCount++ + if generateCount == 1 { + return "Check out our product! Buy now!" + } + return "Discover how our tool saves teams 10 hours/week. Try it free." +} + +type EvalResult struct { + Score float64 + Feedback string +} + +func MockEvaluate(draft string) EvalResult { + if len(draft) < 40 { + return EvalResult{Score: 0.4, Feedback: "Too short and pushy. 
Add a specific benefit and soften the CTA."} + } + return EvalResult{Score: 0.9, Feedback: "Clear value prop, appropriate tone."} +} diff --git a/examples/go/guides/event-driven/main.go b/examples/go/guides/event-driven/main.go new file mode 100644 index 0000000000..4a0ca55a4d --- /dev/null +++ b/examples/go/guides/event-driven/main.go @@ -0,0 +1,53 @@ +package main + +import ( + "context" + "log" + + "github.com/hatchet-dev/hatchet/pkg/cmdutils" + hatchet "github.com/hatchet-dev/hatchet/sdks/go" +) + +type EventInput struct { + Message string `json:"message"` + Source string `json:"source"` +} + +func main() { + client, err := hatchet.NewClient() + if err != nil { + log.Fatalf("failed to create hatchet client: %v", err) + } + + // > Step 01 Define Event Task + task := client.NewStandaloneTask("process-event", func(ctx hatchet.Context, input EventInput) (map[string]string, error) { + source := input.Source + if source == "" { + source = "api" + } + return map[string]string{"processed": input.Message, "source": source}, nil + }, hatchet.WithWorkflowEvents("order:created", "user:signup")) + + // > Step 02 Register Event Trigger + // Push an event from your app. Call this from your webhook handler or API. 
+ pushEvent := func() { + _ = client.Events().Push(context.Background(), "order:created", map[string]interface{}{ + "message": "Order #1234", + "source": "webhook", + }) + } + _ = pushEvent + + // > Step 04 Run Worker + worker, err := client.NewWorker("event-driven-worker", hatchet.WithWorkflows(task)) + if err != nil { + log.Fatalf("failed to create worker: %v", err) + } + + interruptCtx, cancel := cmdutils.NewInterruptContext() + defer cancel() + + if err := worker.StartBlocking(interruptCtx); err != nil { + log.Fatalf("failed to start worker: %v", err) + } +} diff --git a/examples/go/guides/event-driven/trigger.go b/examples/go/guides/event-driven/trigger.go new file mode 100644 index 0000000000..6afa251c7d --- /dev/null +++ b/examples/go/guides/event-driven/trigger.go @@ -0,0 +1,16 @@ +package main + +import ( + "context" + + hatchet "github.com/hatchet-dev/hatchet/sdks/go" +) + +// > Step 03 Push Event +// Push an event to trigger the workflow. Use the same key as WithWorkflowEvents. 
+func pushEvent(client *hatchet.Client) { + _ = client.Events().Push(context.Background(), "order:created", map[string]interface{}{ + "message": "Order #1234", + "source": "webhook", + }) +} diff --git a/examples/go/guides/go.mod b/examples/go/guides/go.mod new file mode 100644 index 0000000000..a663e2018c --- /dev/null +++ b/examples/go/guides/go.mod @@ -0,0 +1,80 @@ +module github.com/hatchet-dev/hatchet/examples/go/guides + +go 1.25.0 + +require ( + github.com/hatchet-dev/hatchet v1.28.0 + github.com/sashabaranov/go-openai v1.28.0 +) + +require ( + cel.dev/expr v0.25.1 // indirect + github.com/Masterminds/semver/v3 v3.4.0 // indirect + github.com/antlr4-go/antlr/v4 v4.13.1 // indirect + github.com/apapsch/go-jsonmerge/v2 v2.0.0 // indirect + github.com/cockroachdb/errors v1.12.0 // indirect + github.com/cockroachdb/logtags v0.0.0-20230118201751-21c54148d20b // indirect + github.com/cockroachdb/redact v1.1.5 // indirect + github.com/creasty/defaults v1.8.0 // indirect + github.com/fsnotify/fsnotify v1.9.0 // indirect + github.com/gabriel-vasile/mimetype v1.4.12 // indirect + github.com/getkin/kin-openapi v0.133.0 // indirect + github.com/getsentry/sentry-go v0.43.0 // indirect + github.com/go-openapi/jsonpointer v0.21.0 // indirect + github.com/go-openapi/swag v0.23.0 // indirect + github.com/go-playground/locales v0.14.1 // indirect + github.com/go-playground/universal-translator v0.18.1 // indirect + github.com/go-playground/validator/v10 v10.30.1 // indirect + github.com/go-viper/mapstructure/v2 v2.4.0 // indirect + github.com/gogo/protobuf v1.3.2 // indirect + github.com/google/cel-go v0.27.0 // indirect + github.com/google/uuid v1.6.0 // indirect + github.com/grpc-ecosystem/go-grpc-middleware/v2 v2.3.3 // indirect + github.com/hashicorp/errwrap v1.1.0 // indirect + github.com/hashicorp/go-multierror v1.1.1 // indirect + github.com/jackc/pgpassfile v1.0.0 // indirect + github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 // indirect + 
github.com/jackc/pgx/v5 v5.8.0 // indirect + github.com/josharian/intern v1.0.0 // indirect + github.com/kr/pretty v0.3.1 // indirect + github.com/kr/text v0.2.0 // indirect + github.com/labstack/echo/v4 v4.15.1 // indirect + github.com/labstack/gommon v0.4.2 // indirect + github.com/leodido/go-urn v1.4.0 // indirect + github.com/mailru/easyjson v0.7.7 // indirect + github.com/mattn/go-colorable v0.1.14 // indirect + github.com/mattn/go-isatty v0.0.20 // indirect + github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826 // indirect + github.com/oapi-codegen/runtime v1.2.0 // indirect + github.com/oasdiff/yaml v0.0.0-20250309154309-f31be36b4037 // indirect + github.com/oasdiff/yaml3 v0.0.0-20250309153720-d2182401db90 // indirect + github.com/pelletier/go-toml/v2 v2.2.4 // indirect + github.com/perimeterx/marshmallow v1.1.5 // indirect + github.com/pkg/errors v0.9.1 // indirect + github.com/robfig/cron/v3 v3.0.1 // indirect + github.com/rogpeppe/go-internal v1.14.1 // indirect + github.com/rs/zerolog v1.34.0 // indirect + github.com/sagikazarmark/locafero v0.11.0 // indirect + github.com/sourcegraph/conc v0.3.1-0.20240121214520-5f936abd7ae8 // indirect + github.com/spf13/afero v1.15.0 // indirect + github.com/spf13/cast v1.10.0 // indirect + github.com/spf13/pflag v1.0.10 // indirect + github.com/spf13/viper v1.21.0 // indirect + github.com/subosito/gotenv v1.6.0 // indirect + github.com/valyala/bytebufferpool v1.0.0 // indirect + github.com/valyala/fasttemplate v1.2.2 // indirect + github.com/woodsbury/decimal128 v1.3.0 // indirect + go.yaml.in/yaml/v3 v3.0.4 // indirect + golang.org/x/crypto v0.48.0 // indirect + golang.org/x/exp v0.0.0-20260218203240-3dfff04db8fa // indirect + golang.org/x/net v0.50.0 // indirect + golang.org/x/sync v0.19.0 // indirect + golang.org/x/sys v0.41.0 // indirect + golang.org/x/text v0.34.0 // indirect + google.golang.org/genproto/googleapis/api v0.0.0-20260209200024-4cfbd4190f57 // indirect + 
google.golang.org/genproto/googleapis/rpc v0.0.0-20260217215200-42d3e9bedb6d // indirect + google.golang.org/grpc v1.79.1 // indirect + google.golang.org/protobuf v1.36.11 // indirect + gopkg.in/yaml.v3 v3.0.1 // indirect +) + diff --git a/examples/go/guides/human-in-the-loop/main.go b/examples/go/guides/human-in-the-loop/main.go new file mode 100644 index 0000000000..7226cbfe67 --- /dev/null +++ b/examples/go/guides/human-in-the-loop/main.go @@ -0,0 +1,77 @@ +package main + +import ( + "fmt" + "log" + + "github.com/hatchet-dev/hatchet/pkg/cmdutils" + hatchet "github.com/hatchet-dev/hatchet/sdks/go" +) + +type ApprovalInput struct { + Action string `json:"action"` + To string `json:"to"` +} + +type ApprovalOutput struct { + Status string `json:"status"` + Action interface{} `json:"action,omitempty"` + Reason string `json:"reason,omitempty"` +} + +func main() { + client, err := hatchet.NewClient() + if err != nil { + log.Fatalf("failed to create hatchet client: %v", err) + } + + // > Step 02 Wait For Event + waitForApproval := func(ctx hatchet.DurableContext) (map[string]interface{}, error) { + runID := ctx.WorkflowRunId() + expression := fmt.Sprintf("input.runId == '%s'", runID) + event, err := ctx.WaitForEvent("approval:decision", expression) + if err != nil { + return nil, err + } + var eventData map[string]interface{} + if err := hatchet.EventInto(event, &eventData); err != nil { + return nil, err + } + return eventData, nil + } + + // > Step 01 Define Approval Task + task := client.NewStandaloneDurableTask("approval-task", func(ctx hatchet.DurableContext, input ApprovalInput) (ApprovalOutput, error) { + proposedAction := map[string]string{"action": "send_email", "to": "user@example.com"} + approval, err := waitForApproval(ctx) + if err != nil { + return ApprovalOutput{}, err + } + approved, _ := approval["approved"].(bool) + if approved { + return ApprovalOutput{Status: "approved", Action: proposedAction}, nil + } + reason, _ := approval["reason"].(string) + 
return ApprovalOutput{Status: "rejected", Reason: reason}, nil + }) + + // > Step 04 Run Worker + worker, err := client.NewWorker("human-in-the-loop-worker", + hatchet.WithWorkflows(task), + hatchet.WithDurableSlots(5), + ) + if err != nil { + log.Fatalf("failed to create worker: %v", err) + } + + interruptCtx, cancel := cmdutils.NewInterruptContext() + defer cancel() + + go func() { + if err := worker.StartBlocking(interruptCtx); err != nil { + log.Fatalf("failed to start worker: %v", err) + } + }() + + <-interruptCtx.Done() +} diff --git a/examples/go/guides/human-in-the-loop/trigger.go b/examples/go/guides/human-in-the-loop/trigger.go new file mode 100644 index 0000000000..1eb2739add --- /dev/null +++ b/examples/go/guides/human-in-the-loop/trigger.go @@ -0,0 +1,17 @@ +package main + +import ( + "context" + + hatchet "github.com/hatchet-dev/hatchet/sdks/go" +) + +// > Step 03 Push Approval Event +// Include the runID so the event matches the specific task waiting for it. +func pushApproval(client *hatchet.Client, runID string, approved bool, reason string) { + _ = client.Events().Push(context.Background(), "approval:decision", map[string]interface{}{ + "runId": runID, + "approved": approved, + "reason": reason, + }) +} diff --git a/examples/go/guides/integrations/embedding_openai.go b/examples/go/guides/integrations/embedding_openai.go new file mode 100644 index 0000000000..a01f8ed5b6 --- /dev/null +++ b/examples/go/guides/integrations/embedding_openai.go @@ -0,0 +1,24 @@ +// Third-party integration - requires: go get github.com/sashabaranov/go-openai +// See: /guides/rag-and-indexing + +package integrations + +import ( + "context" + "os" + + "github.com/sashabaranov/go-openai" +) + +// > OpenAI embedding usage +func Embed(ctx context.Context, text string) ([]float32, error) { + client := openai.NewClient(os.Getenv("OPENAI_API_KEY")) + resp, err := client.CreateEmbeddings(ctx, openai.EmbeddingRequest{ + Model: openai.AdaEmbeddingV2, + Input: text, + }) + if err 
!= nil { + return nil, err + } + return resp.Data[0].Embedding, nil +} diff --git a/examples/go/guides/integrations/llm_openai.go b/examples/go/guides/integrations/llm_openai.go new file mode 100644 index 0000000000..99bafe9aa7 --- /dev/null +++ b/examples/go/guides/integrations/llm_openai.go @@ -0,0 +1,36 @@ +// Third-party integration - requires: go get github.com/sashabaranov/go-openai +// See: /guides/ai-agents + +package integrations + +import ( + "context" + "encoding/json" + "os" + + "github.com/sashabaranov/go-openai" +) + +// > OpenAI usage +func Complete(ctx context.Context, messages []openai.ChatCompletionMessage) (map[string]interface{}, error) { + client := openai.NewClient(os.Getenv("OPENAI_API_KEY")) + resp, err := client.CreateChatCompletion(ctx, openai.ChatCompletionRequest{ + Model: openai.GPT4oMini, + Messages: messages, + }) + if err != nil { + return nil, err + } + msg := resp.Choices[0].Message + toolCalls := make([]map[string]interface{}, 0) + for _, tc := range msg.ToolCalls { + var args map[string]interface{} + _ = json.Unmarshal([]byte(tc.Function.Arguments), &args) + toolCalls = append(toolCalls, map[string]interface{}{"name": tc.Function.Name, "args": args}) + } + return map[string]interface{}{ + "content": msg.Content, + "tool_calls": toolCalls, + "done": len(toolCalls) == 0, + }, nil +} diff --git a/examples/go/guides/integrations/ocr_tesseract.go b/examples/go/guides/integrations/ocr_tesseract.go new file mode 100644 index 0000000000..e30b5fd459 --- /dev/null +++ b/examples/go/guides/integrations/ocr_tesseract.go @@ -0,0 +1,16 @@ +//go:build ignore + +// Third-party integration - requires: go get github.com/otiai10/gosseract/v2 +// and Tesseract C library. Build tag excludes from default build (no native deps in CI). 
+// See: /guides/document-processing + +package integrations + +import "github.com/otiai10/gosseract/v2" + +// > Tesseract usage +func ParseDocument(content []byte) (string, error) { + client := gosseract.NewClient() + defer client.Close() + return client.SetImageFromBytes(content).GetText() +} diff --git a/examples/go/guides/llm-pipelines/main.go b/examples/go/guides/llm-pipelines/main.go new file mode 100644 index 0000000000..ba8d23ff4d --- /dev/null +++ b/examples/go/guides/llm-pipelines/main.go @@ -0,0 +1,72 @@ +package main + +import ( + "log" + + "github.com/hatchet-dev/hatchet/pkg/cmdutils" + hatchet "github.com/hatchet-dev/hatchet/sdks/go" +) + +type PipelineInput struct { + Prompt string `json:"prompt"` +} + +// generate is a mock - no external LLM API. +func generate(prompt string) map[string]interface{} { + n := 50 + if len(prompt) < n { + n = len(prompt) + } + return map[string]interface{}{"content": "Generated for: " + prompt[:n] + "...", "valid": true} +} + +func main() { + client, err := hatchet.NewClient() + if err != nil { + log.Fatalf("failed to create hatchet client: %v", err) + } + + // > Step 01 Define Pipeline + workflow := client.NewWorkflow("LLMPipeline") + + // > Step 02 Prompt Task + buildPrompt := func(userInput, context string) string { + if context != "" { + return "Process the following: " + userInput + "\nContext: " + context + } + return "Process the following: " + userInput + } + _ = buildPrompt + + promptTask := workflow.NewTask("prompt-task", func(ctx hatchet.Context, input PipelineInput) (map[string]interface{}, error) { + return map[string]interface{}{"prompt": input.Prompt}, nil + }) + + // > Step 03 Validate Task + generateTask := workflow.NewTask("generate-task", func(ctx hatchet.Context, input PipelineInput) (map[string]interface{}, error) { + var prev map[string]interface{} + if err := ctx.ParentOutput(promptTask, &prev); err != nil { + return nil, err + } + output := generate(prev["prompt"].(string)) + if 
!output["valid"].(bool) { + panic("validation failed") + } + return output, nil + }, hatchet.WithParents(promptTask)) + + _ = generateTask + + // > Step 04 Run Worker + worker, err := client.NewWorker("llm-pipeline-worker", hatchet.WithWorkflows(workflow)) + if err != nil { + log.Fatalf("failed to create worker: %v", err) + } + + interruptCtx, cancel := cmdutils.NewInterruptContext() + defer cancel() + + if err := worker.StartBlocking(interruptCtx); err != nil { + log.Fatalf("failed to start worker: %v", err) + } +} diff --git a/examples/go/guides/multi-agent/main.go b/examples/go/guides/multi-agent/main.go new file mode 100644 index 0000000000..4f91654979 --- /dev/null +++ b/examples/go/guides/multi-agent/main.go @@ -0,0 +1,100 @@ +package main + +import ( + "fmt" + "log" + "strings" + + "github.com/hatchet-dev/hatchet/pkg/cmdutils" + hatchet "github.com/hatchet-dev/hatchet/sdks/go" +) + +type SpecialistInput struct { + Task string `json:"task"` + Context string `json:"context,omitempty"` +} + +func main() { + client, err := hatchet.NewClient() + if err != nil { + log.Fatalf("failed to create hatchet client: %v", err) + } + + // > Step 01 Specialist Agents + researchTask := client.NewStandaloneDurableTask("research-specialist", func(ctx hatchet.DurableContext, input SpecialistInput) (map[string]interface{}, error) { + return map[string]interface{}{"result": MockSpecialistLLM(input.Task, "research")}, nil + }) + + writingTask := client.NewStandaloneDurableTask("writing-specialist", func(ctx hatchet.DurableContext, input SpecialistInput) (map[string]interface{}, error) { + return map[string]interface{}{"result": MockSpecialistLLM(input.Task, "writing")}, nil + }) + + codeTask := client.NewStandaloneDurableTask("code-specialist", func(ctx hatchet.DurableContext, input SpecialistInput) (map[string]interface{}, error) { + return map[string]interface{}{"result": MockSpecialistLLM(input.Task, "code")}, nil + }) + + specialists := map[string]*hatchet.StandaloneTask{ + 
"research": researchTask, + "writing": writingTask, + "code": codeTask, + } + + // > Step 02 Orchestrator Loop + orchestrator := client.NewStandaloneDurableTask("multi-agent-orchestrator", func(ctx hatchet.DurableContext, input map[string]interface{}) (map[string]interface{}, error) { + messages := []map[string]interface{}{{"role": "user", "content": input["goal"].(string)}} + + for i := 0; i < 10; i++ { + response := MockOrchestratorLLM(messages) + + if response.Done { + return map[string]interface{}{"result": response.Content}, nil + } + + specialist, ok := specialists[response.ToolCall.Name] + if !ok { + return nil, fmt.Errorf("unknown specialist: %s", response.ToolCall.Name) + } + + var contextParts []string + for _, m := range messages { + contextParts = append(contextParts, m["content"].(string)) + } + + taskResult, err := specialist.Run(ctx, SpecialistInput{ + Task: response.ToolCall.Args["task"], + Context: strings.Join(contextParts, "\n"), + }) + if err != nil { + return nil, err + } + var result map[string]interface{} + if err := taskResult.Into(&result); err != nil { + return nil, err + } + + messages = append(messages, + map[string]interface{}{"role": "assistant", "content": fmt.Sprintf("Called %s", response.ToolCall.Name)}, + map[string]interface{}{"role": "tool", "content": result["result"].(string)}, + ) + } + + return map[string]interface{}{"result": "Max iterations reached"}, nil + }) + + // > Step 03 Run Worker + worker, err := client.NewWorker("multi-agent-worker", + hatchet.WithWorkflows(researchTask, writingTask, codeTask, orchestrator), + hatchet.WithSlots(10), + hatchet.WithDurableSlots(5), + ) + if err != nil { + log.Fatalf("failed to create worker: %v", err) + } + + interruptCtx, cancel := cmdutils.NewInterruptContext() + defer cancel() + + if err := worker.StartBlocking(interruptCtx); err != nil { + log.Fatalf("failed to start worker: %v", err) + } +} diff --git a/examples/go/guides/multi-agent/mock_llm.go 
b/examples/go/guides/multi-agent/mock_llm.go new file mode 100644 index 0000000000..66ae295af6 --- /dev/null +++ b/examples/go/guides/multi-agent/mock_llm.go @@ -0,0 +1,36 @@ +package main + +import "fmt" + +var orchestratorCallCount int + +type OrchestratorResponse struct { + Done bool + Content string + ToolCall *struct { + Name string + Args map[string]string + } +} + +func MockOrchestratorLLM(messages []map[string]interface{}) OrchestratorResponse { + orchestratorCallCount++ + switch orchestratorCallCount { + case 1: + return OrchestratorResponse{Done: false, ToolCall: &struct { + Name string + Args map[string]string + }{Name: "research", Args: map[string]string{"task": "Find key facts about the topic"}}} + case 2: + return OrchestratorResponse{Done: false, ToolCall: &struct { + Name string + Args map[string]string + }{Name: "writing", Args: map[string]string{"task": "Write a summary from the research"}}} + default: + return OrchestratorResponse{Done: true, Content: "Here is the final report combining research and writing."} + } +} + +func MockSpecialistLLM(task, role string) string { + return fmt.Sprintf("[%s] Completed: %s", role, task) +} diff --git a/examples/go/guides/parallelization/main.go b/examples/go/guides/parallelization/main.go new file mode 100644 index 0000000000..08534a8355 --- /dev/null +++ b/examples/go/guides/parallelization/main.go @@ -0,0 +1,140 @@ +package main + +import ( + "log" + "sync" + + "github.com/hatchet-dev/hatchet/pkg/cmdutils" + hatchet "github.com/hatchet-dev/hatchet/sdks/go" +) + +type MessageInput struct { + Message string `json:"message"` +} + +type ContentInput struct { + Content string `json:"content"` +} + +func main() { + client, err := hatchet.NewClient() + if err != nil { + log.Fatalf("failed to create hatchet client: %v", err) + } + + // > Step 01 Parallel Tasks + contentTask := client.NewStandaloneTask("generate-content", func(ctx hatchet.Context, input MessageInput) (map[string]interface{}, error) { + return 
map[string]interface{}{"content": MockGenerateContent(input.Message)}, nil + }) + + safetyTask := client.NewStandaloneTask("safety-check", func(ctx hatchet.Context, input MessageInput) (map[string]interface{}, error) { + result := MockSafetyCheck(input.Message) + return map[string]interface{}{"safe": result.Safe, "reason": result.Reason}, nil + }) + + evaluateTask := client.NewStandaloneTask("evaluate-content", func(ctx hatchet.Context, input ContentInput) (map[string]interface{}, error) { + result := MockEvaluateContent(input.Content) + return map[string]interface{}{"score": result.Score, "approved": result.Approved}, nil + }) + + // > Step 02 Sectioning + sectioningTask := client.NewStandaloneDurableTask("parallel-sectioning", func(ctx hatchet.DurableContext, input map[string]interface{}) (map[string]interface{}, error) { + msg := input["message"].(string) + + var contentTr, safetyTr *hatchet.TaskResult + var contentErr, safetyErr error + var wg sync.WaitGroup + wg.Add(2) + + go func() { + defer wg.Done() + contentTr, contentErr = contentTask.Run(ctx, MessageInput{Message: msg}) + }() + go func() { + defer wg.Done() + safetyTr, safetyErr = safetyTask.Run(ctx, MessageInput{Message: msg}) + }() + wg.Wait() + + if contentErr != nil { + return nil, contentErr + } + if safetyErr != nil { + return nil, safetyErr + } + var contentResult, safetyResult map[string]interface{} + if err := contentTr.Into(&contentResult); err != nil { + return nil, err + } + if err := safetyTr.Into(&safetyResult); err != nil { + return nil, err + } + + if safe, ok := safetyResult["safe"].(bool); !ok || !safe { + return map[string]interface{}{"blocked": true, "reason": safetyResult["reason"]}, nil + } + return map[string]interface{}{"blocked": false, "content": contentResult["content"]}, nil + }) + + // > Step 03 Voting + votingTask := client.NewStandaloneDurableTask("parallel-voting", func(ctx hatchet.DurableContext, input map[string]interface{}) (map[string]interface{}, error) { + content := 
input["content"].(string) + numVoters := 3 + taskResults := make([]*hatchet.TaskResult, numVoters) + errs := make([]error, numVoters) + + var wg sync.WaitGroup + for i := 0; i < numVoters; i++ { + wg.Add(1) + go func(idx int) { + defer wg.Done() + taskResults[idx], errs[idx] = evaluateTask.Run(ctx, ContentInput{Content: content}) + }(i) + } + wg.Wait() + + results := make([]map[string]interface{}, numVoters) + for i := 0; i < numVoters; i++ { + if errs[i] != nil { + return nil, errs[i] + } + if err := taskResults[i].Into(&results[i]); err != nil { + return nil, err + } + } + + approvals := 0 + totalScore := 0.0 + for _, r := range results { + if approved, ok := r["approved"].(bool); ok && approved { + approvals++ + } + if score, ok := r["score"].(float64); ok { + totalScore += score + } + } + + return map[string]interface{}{ + "approved": approvals >= 2, + "averageScore": totalScore / float64(numVoters), + "votes": numVoters, + }, nil + }) + + // > Step 04 Run Worker + worker, err := client.NewWorker("parallelization-worker", + hatchet.WithWorkflows(contentTask, safetyTask, evaluateTask, sectioningTask, votingTask), + hatchet.WithSlots(10), + hatchet.WithDurableSlots(5), + ) + if err != nil { + log.Fatalf("failed to create worker: %v", err) + } + + interruptCtx, cancel := cmdutils.NewInterruptContext() + defer cancel() + + if err := worker.StartBlocking(interruptCtx); err != nil { + log.Fatalf("failed to start worker: %v", err) + } +} diff --git a/examples/go/guides/parallelization/mock_llm.go b/examples/go/guides/parallelization/mock_llm.go new file mode 100644 index 0000000000..a8f75a2dc4 --- /dev/null +++ b/examples/go/guides/parallelization/mock_llm.go @@ -0,0 +1,32 @@ +package main + +import "strings" + +func MockGenerateContent(message string) string { + return "Here is a helpful response to: " + message +} + +type SafetyResult struct { + Safe bool + Reason string +} + +func MockSafetyCheck(message string) SafetyResult { + if 
strings.Contains(strings.ToLower(message), "unsafe") { + return SafetyResult{Safe: false, Reason: "Content flagged as potentially unsafe."} + } + return SafetyResult{Safe: true, Reason: "Content is appropriate."} +} + +type EvalResult struct { + Score float64 + Approved bool +} + +func MockEvaluateContent(content string) EvalResult { + score := 0.3 + if len(content) > 20 { + score = 0.85 + } + return EvalResult{Score: score, Approved: score >= 0.7} +} diff --git a/examples/go/guides/rag-and-indexing/main.go b/examples/go/guides/rag-and-indexing/main.go new file mode 100644 index 0000000000..c2a87a852d --- /dev/null +++ b/examples/go/guides/rag-and-indexing/main.go @@ -0,0 +1,130 @@ +package main + +import ( + "log" + + "github.com/hatchet-dev/hatchet/pkg/cmdutils" + hatchet "github.com/hatchet-dev/hatchet/sdks/go" +) + +type DocInput struct { + DocID string `json:"doc_id"` + Content string `json:"content"` +} + +type ChunkInput struct { + Chunk string `json:"chunk"` +} + +type QueryInput struct { + Query string `json:"query"` + TopK int `json:"top_k"` +} + +func embed(text string) []float64 { + vec := make([]float64, 64) + for i := range vec { + vec[i] = 0.1 + } + return vec +} + +func main() { + client, err := hatchet.NewClient() + if err != nil { + log.Fatalf("failed to create hatchet client: %v", err) + } + + // > Step 01 Define Workflow + workflow := client.NewWorkflow("RAGPipeline") + + // > Step 02 Define Ingest Task + ingest := workflow.NewTask("ingest", func(ctx hatchet.Context, input DocInput) (map[string]interface{}, error) { + return map[string]interface{}{"doc_id": input.DocID, "content": input.Content}, nil + }) + + // > Step 03 Chunk Task + chunkContent := func(content string, chunkSize int) []string { + var chunks []string + for i := 0; i < len(content); i += chunkSize { + end := i + chunkSize + if end > len(content) { + end = len(content) + } + chunks = append(chunks, content[i:end]) + } + return chunks + } + _ = chunkContent + + // > Step 04 Embed 
Task + embedChunkTask := client.NewStandaloneTask("embed-chunk", func(ctx hatchet.Context, input ChunkInput) (map[string]interface{}, error) { + return map[string]interface{}{"vector": embed(input.Chunk)}, nil + }) + + chunkAndEmbed := workflow.NewDurableTask("chunk-and-embed", func(ctx hatchet.DurableContext, input DocInput) (map[string]interface{}, error) { + var ingested map[string]interface{} + if err := ctx.ParentOutput(ingest, &ingested); err != nil { + return nil, err + } + content := ingested["content"].(string) + var chunks []string + for i := 0; i < len(content); i += 100 { + end := i + 100 + if end > len(content) { + end = len(content) + } + chunks = append(chunks, content[i:end]) + } + inputs := make([]hatchet.RunManyOpt, len(chunks)) + for i, c := range chunks { + inputs[i] = hatchet.RunManyOpt{Input: ChunkInput{Chunk: c}} + } + runRefs, err := embedChunkTask.RunMany(ctx, inputs) + if err != nil { + return nil, err + } + vectors := make([]interface{}, len(runRefs)) + for i, ref := range runRefs { + result, err := ref.Result() + if err != nil { + return nil, err + } + var parsed map[string]interface{} + if err := result.TaskOutput("embed-chunk").Into(&parsed); err != nil { + return nil, err + } + vectors[i] = parsed["vector"] + } + return map[string]interface{}{"doc_id": ingested["doc_id"], "vectors": vectors}, nil + }, hatchet.WithParents(ingest)) + + _ = chunkAndEmbed + + // > Step 05 Query Task + queryTask := client.NewStandaloneDurableTask("rag-query", func(ctx hatchet.DurableContext, input QueryInput) (map[string]interface{}, error) { + res, err := embedChunkTask.Run(ctx, ChunkInput{Chunk: input.Query}) + if err != nil { + return nil, err + } + var parsed map[string]interface{} + if err := res.Into(&parsed); err != nil { + return nil, err + } + // Replace with a real vector DB lookup in production + return map[string]interface{}{"query": input.Query, "vector": parsed["vector"], "results": []interface{}{}}, nil + }) + + // > Step 06 Run Worker + 
worker, err := client.NewWorker("rag-worker", hatchet.WithWorkflows(workflow, embedChunkTask, queryTask)) + if err != nil { + log.Fatalf("failed to create worker: %v", err) + } + + interruptCtx, cancel := cmdutils.NewInterruptContext() + defer cancel() + + if err := worker.StartBlocking(interruptCtx); err != nil { + log.Fatalf("failed to start worker: %v", err) + } +} diff --git a/examples/go/guides/routing/main.go b/examples/go/guides/routing/main.go new file mode 100644 index 0000000000..e468d8b039 --- /dev/null +++ b/examples/go/guides/routing/main.go @@ -0,0 +1,87 @@ +package main + +import ( + "log" + + "github.com/hatchet-dev/hatchet/pkg/cmdutils" + hatchet "github.com/hatchet-dev/hatchet/sdks/go" +) + +type MessageInput struct { + Message string `json:"message"` +} + +func main() { + client, err := hatchet.NewClient() + if err != nil { + log.Fatalf("failed to create hatchet client: %v", err) + } + + // > Step 01 Classify Task + classifyTask := client.NewStandaloneDurableTask("classify-message", func(ctx hatchet.DurableContext, input MessageInput) (map[string]interface{}, error) { + return map[string]interface{}{"category": MockClassify(input.Message)}, nil + }) + + // > Step 02 Specialist Tasks + supportTask := client.NewStandaloneDurableTask("handle-support", func(ctx hatchet.DurableContext, input MessageInput) (map[string]interface{}, error) { + return map[string]interface{}{"response": MockReply(input.Message, "support"), "category": "support"}, nil + }) + + salesTask := client.NewStandaloneDurableTask("handle-sales", func(ctx hatchet.DurableContext, input MessageInput) (map[string]interface{}, error) { + return map[string]interface{}{"response": MockReply(input.Message, "sales"), "category": "sales"}, nil + }) + + defaultTask := client.NewStandaloneDurableTask("handle-default", func(ctx hatchet.DurableContext, input MessageInput) (map[string]interface{}, error) { + return map[string]interface{}{"response": MockReply(input.Message, "other"), "category": 
"other"}, nil + }) + + // > Step 03 Router Task + routerTask := client.NewStandaloneDurableTask("message-router", func(ctx hatchet.DurableContext, input map[string]interface{}) (map[string]interface{}, error) { + msg := input["message"].(string) + classResult, err := classifyTask.Run(ctx, MessageInput{Message: msg}) + if err != nil { + return nil, err + } + var classData map[string]interface{} + if err := classResult.Into(&classData); err != nil { + return nil, err + } + + runAndUnmarshal := func(t *hatchet.StandaloneTask) (map[string]interface{}, error) { + tr, err := t.Run(ctx, MessageInput{Message: msg}) + if err != nil { + return nil, err + } + var out map[string]interface{} + if err := tr.Into(&out); err != nil { + return nil, err + } + return out, nil + } + switch classData["category"].(string) { + case "support": + return runAndUnmarshal(supportTask) + case "sales": + return runAndUnmarshal(salesTask) + default: + return runAndUnmarshal(defaultTask) + } + }) + + // > Step 04 Run Worker + worker, err := client.NewWorker("routing-worker", + hatchet.WithWorkflows(classifyTask, supportTask, salesTask, defaultTask, routerTask), + hatchet.WithSlots(5), + hatchet.WithDurableSlots(5), + ) + if err != nil { + log.Fatalf("failed to create worker: %v", err) + } + + interruptCtx, cancel := cmdutils.NewInterruptContext() + defer cancel() + + if err := worker.StartBlocking(interruptCtx); err != nil { + log.Fatalf("failed to start worker: %v", err) + } +} diff --git a/examples/go/guides/routing/mock_classifier.go b/examples/go/guides/routing/mock_classifier.go new file mode 100644 index 0000000000..2cf86711af --- /dev/null +++ b/examples/go/guides/routing/mock_classifier.go @@ -0,0 +1,29 @@ +package main + +import "strings" + +func MockClassify(message string) string { + lower := strings.ToLower(message) + for _, w := range []string{"bug", "error", "help"} { + if strings.Contains(lower, w) { + return "support" + } + } + for _, w := range []string{"price", "buy", "plan"} { 
+ if strings.Contains(lower, w) { + return "sales" + } + } + return "other" +} + +func MockReply(message, role string) string { + switch role { + case "support": + return "[Support] I can help with that technical issue. Let me look into: " + message + case "sales": + return "[Sales] Great question about pricing! Here's what I can tell you about: " + message + default: + return "[General] Thanks for reaching out. Regarding: " + message + } +} diff --git a/examples/go/guides/scheduled-jobs/main.go b/examples/go/guides/scheduled-jobs/main.go new file mode 100644 index 0000000000..dfa40a84e2 --- /dev/null +++ b/examples/go/guides/scheduled-jobs/main.go @@ -0,0 +1,35 @@ +package main + +import ( + "log" + + "github.com/hatchet-dev/hatchet/pkg/cmdutils" + hatchet "github.com/hatchet-dev/hatchet/sdks/go" +) + +func main() { + client, err := hatchet.NewClient() + if err != nil { + log.Fatalf("failed to create hatchet client: %v", err) + } + + // > Step 01 Define Cron Task + task := client.NewStandaloneTask("run-scheduled-job", func(ctx hatchet.Context, input map[string]interface{}) (map[string]string, error) { + return map[string]string{"status": "completed", "job": "maintenance"}, nil + }, hatchet.WithWorkflowCron("0 * * * *")) + + // > Step 03 Run Worker + worker, err := client.NewWorker("scheduled-worker", hatchet.WithWorkflows(task)) + if err != nil { + log.Fatalf("failed to create worker: %v", err) + } + + interruptCtx, cancel := cmdutils.NewInterruptContext() + defer cancel() + + go scheduleOneTime(client) + + if err := worker.StartBlocking(interruptCtx); err != nil { + log.Fatalf("failed to start worker: %v", err) + } +} diff --git a/examples/go/guides/scheduled-jobs/trigger.go b/examples/go/guides/scheduled-jobs/trigger.go new file mode 100644 index 0000000000..293975c10d --- /dev/null +++ b/examples/go/guides/scheduled-jobs/trigger.go @@ -0,0 +1,22 @@ +package main + +import ( + "context" + "log" + "time" + + hatchet "github.com/hatchet-dev/hatchet/sdks/go" + 
"github.com/hatchet-dev/hatchet/sdks/go/features" +) + +// > Step 02 Schedule One Time +func scheduleOneTime(client *hatchet.Client) { + runAt := time.Now().Add(1 * time.Hour) + _, err := client.Schedules().Create(context.Background(), "run-scheduled-job", features.CreateScheduledRunTrigger{ + TriggerAt: runAt, + Input: map[string]interface{}{}, + }) + if err != nil { + log.Printf("failed to schedule: %v", err) + } +} diff --git a/examples/go/guides/streaming/client.go b/examples/go/guides/streaming/client.go new file mode 100644 index 0000000000..6bed65534c --- /dev/null +++ b/examples/go/guides/streaming/client.go @@ -0,0 +1,18 @@ +package main + +import ( + "context" + "fmt" + + hatchet "github.com/hatchet-dev/hatchet/sdks/go" +) + +// > Step 03 Subscribe Client +// Client triggers the task and subscribes to the stream. +func runAndSubscribe(client *hatchet.Client) { + runRef, _ := client.RunNoWait(context.Background(), "stream-example", map[string]interface{}{}) + stream := client.Runs().SubscribeToStream(context.Background(), runRef.RunId) + for chunk := range stream { + fmt.Print(chunk) + } +} diff --git a/examples/go/guides/streaming/main.go b/examples/go/guides/streaming/main.go new file mode 100644 index 0000000000..5805eb9b8d --- /dev/null +++ b/examples/go/guides/streaming/main.go @@ -0,0 +1,47 @@ +package main + +import ( + "log" + "time" + + "github.com/hatchet-dev/hatchet/pkg/cmdutils" + hatchet "github.com/hatchet-dev/hatchet/sdks/go" +) + +func main() { + client, err := hatchet.NewClient() + if err != nil { + log.Fatalf("failed to create hatchet client: %v", err) + } + + // > Step 01 Define Streaming Task + task := client.NewStandaloneTask("stream-example", func(ctx hatchet.Context, input map[string]interface{}) (map[string]string, error) { + for i := 0; i < 5; i++ { + ctx.PutStream("chunk-" + string(rune('0'+i))) + time.Sleep(500 * time.Millisecond) + } + return map[string]string{"status": "done"}, nil + }) + + // > Step 02 Emit Chunks + emitChunks 
:= func(ctx hatchet.Context) { + for i := 0; i < 5; i++ { + ctx.PutStream("chunk-" + string(rune('0'+i))) + time.Sleep(500 * time.Millisecond) + } + } + _ = emitChunks + + // > Step 04 Run Worker + worker, err := client.NewWorker("streaming-worker", hatchet.WithWorkflows(task)) + if err != nil { + log.Fatalf("failed to create worker: %v", err) + } + + interruptCtx, cancel := cmdutils.NewInterruptContext() + defer cancel() + + if err := worker.StartBlocking(interruptCtx); err != nil { + log.Fatalf("failed to start worker: %v", err) + } +} diff --git a/examples/go/guides/web-scraping/main.go b/examples/go/guides/web-scraping/main.go new file mode 100644 index 0000000000..9b825cd929 --- /dev/null +++ b/examples/go/guides/web-scraping/main.go @@ -0,0 +1,124 @@ +package main + +import ( + "log" + "regexp" + "strings" + + "github.com/hatchet-dev/hatchet/pkg/client/types" + "github.com/hatchet-dev/hatchet/pkg/cmdutils" + hatchet "github.com/hatchet-dev/hatchet/sdks/go" + "github.com/hatchet-dev/hatchet/sdks/go/features" +) + +type ScrapeInput struct { + URL string `json:"url"` +} + +type ProcessInput struct { + URL string `json:"url"` + Content string `json:"content"` +} + +const scrapeRateLimitKey = "scrape-rate-limit" + +func main() { + client, err := hatchet.NewClient() + if err != nil { + log.Fatalf("failed to create hatchet client: %v", err) + } + + // > Step 01 Define Scrape Task + scrapeTask := client.NewStandaloneTask("scrape-url", func(ctx hatchet.Context, input ScrapeInput) (map[string]interface{}, error) { + result := MockScrape(input.URL) + return map[string]interface{}{ + "url": result.URL, "title": result.Title, + "content": result.Content, "scraped_at": result.ScrapedAt, + }, nil + }, hatchet.WithRetries(2)) + + // > Step 02 Process Content + linkRe := regexp.MustCompile(`https?://[^\s<>"']+`) + processTask := client.NewStandaloneTask("process-content", func(ctx hatchet.Context, input ProcessInput) (map[string]interface{}, error) { + links := 
linkRe.FindAllString(input.Content, -1) + summary := input.Content + if len(summary) > 200 { + summary = summary[:200] + } + wordCount := len(strings.Fields(input.Content)) + return map[string]interface{}{ + "summary": strings.TrimSpace(summary), "word_count": wordCount, "links": links, + }, nil + }) + + // > Step 03 Cron Workflow + cronWf := client.NewWorkflow("WebScrapeWorkflow", hatchet.WithWorkflowCron("0 */6 * * *")) + + cronWf.NewTask("scheduled-scrape", func(ctx hatchet.Context, input map[string]interface{}) (map[string]interface{}, error) { + urls := []string{ + "https://example.com/pricing", + "https://example.com/blog", + "https://example.com/docs", + } + + results := []map[string]string{} + for _, url := range urls { + scrapedResult, err := scrapeTask.Run(ctx, ScrapeInput{URL: url}) + if err != nil { + return nil, err + } + var scraped map[string]interface{} + if err := scrapedResult.Into(&scraped); err != nil { + return nil, err + } + processedResult, err := processTask.Run(ctx, ProcessInput{URL: url, Content: scraped["content"].(string)}) + if err != nil { + return nil, err + } + var processed map[string]string + if err := processedResult.Into(&processed); err != nil { + return nil, err + } + results = append(results, processed) + } + return map[string]interface{}{"refreshed": len(results), "results": results}, nil + }) + + // > Step 04 Rate Limited Scrape + units := 1 + rateLimitedScrapeTask := client.NewStandaloneTask("rate-limited-scrape", func(ctx hatchet.Context, input ScrapeInput) (map[string]interface{}, error) { + result := MockScrape(input.URL) + return map[string]interface{}{ + "url": result.URL, "title": result.Title, + "content": result.Content, "scraped_at": result.ScrapedAt, + }, nil + }, hatchet.WithRetries(2), hatchet.WithRateLimits(&types.RateLimit{ + Key: scrapeRateLimitKey, + Units: &units, + })) + + // > Step 05 Run Worker + err = client.RateLimits().Upsert(features.CreateRatelimitOpts{ + Key: scrapeRateLimitKey, + Limit: 10, + 
Duration: types.Minute, + }) + if err != nil { + log.Fatalf("failed to upsert rate limit: %v", err) + } + + worker, err := client.NewWorker("web-scraping-worker", + hatchet.WithWorkflows(scrapeTask, processTask, cronWf, rateLimitedScrapeTask), + hatchet.WithSlots(5), + ) + if err != nil { + log.Fatalf("failed to create worker: %v", err) + } + + interruptCtx, cancel := cmdutils.NewInterruptContext() + defer cancel() + + if err := worker.StartBlocking(interruptCtx); err != nil { + log.Fatalf("failed to start worker: %v", err) + } +} diff --git a/examples/go/guides/web-scraping/mock_scraper.go b/examples/go/guides/web-scraping/mock_scraper.go new file mode 100644 index 0000000000..dcc72de7a3 --- /dev/null +++ b/examples/go/guides/web-scraping/mock_scraper.go @@ -0,0 +1,36 @@ +package main + +import "time" + +type ScrapeResult struct { + URL string `json:"url"` + Title string `json:"title"` + Content string `json:"content"` + ScrapedAt string `json:"scraped_at"` +} + +func MockScrape(url string) ScrapeResult { + return ScrapeResult{ + URL: url, + Title: "Page: " + url, + Content: "Mock scraped content from " + url + ". 
In production, use Firecrawl, Browserbase, or Playwright here.", + ScrapedAt: time.Now().UTC().Format(time.RFC3339), + } +} + +func MockExtract(content string) map[string]string { + summary := content + if len(summary) > 80 { + summary = summary[:80] + } + words := 0 + for _, c := range content { + if c == ' ' { + words++ + } + } + return map[string]string{ + "summary": summary, + "word_count": string(rune(words + 1)), + } +} diff --git a/examples/go/guides/webhook-processing/main.go b/examples/go/guides/webhook-processing/main.go new file mode 100644 index 0000000000..9a6509d3f2 --- /dev/null +++ b/examples/go/guides/webhook-processing/main.go @@ -0,0 +1,57 @@ +package main + +import ( + "context" + "log" + + "github.com/hatchet-dev/hatchet/pkg/cmdutils" + hatchet "github.com/hatchet-dev/hatchet/sdks/go" +) + +type WebhookPayload struct { + EventID string `json:"event_id"` + Type string `json:"type"` + Data map[string]interface{} `json:"data"` +} + +func main() { + client, err := hatchet.NewClient() + if err != nil { + log.Fatalf("failed to create hatchet client: %v", err) + } + + // > Step 01 Define Webhook Task + task := client.NewStandaloneTask("process-webhook", func(ctx hatchet.Context, input WebhookPayload) (map[string]string, error) { + return map[string]string{"processed": input.EventID, "type": input.Type}, nil + }, hatchet.WithWorkflowEvents("webhook:stripe", "webhook:github")) + + // > Step 02 Register Webhook + // Call from your webhook endpoint to trigger the task. + forwardWebhook := func(eventKey string, payload map[string]interface{}) { + _ = client.Events().Push(context.Background(), eventKey, payload) + } + _ = forwardWebhook + + // > Step 03 Process Payload + // Validate event_id for deduplication; process idempotently. 
+ validateAndProcess := func(input WebhookPayload) (map[string]string, error) { + if input.EventID == "" { + return nil, nil // or return error + } + return map[string]string{"processed": input.EventID, "type": input.Type}, nil + } + _ = validateAndProcess + + // > Step 04 Run Worker + worker, err := client.NewWorker("webhook-worker", hatchet.WithWorkflows(task)) + if err != nil { + log.Fatalf("failed to create worker: %v", err) + } + + interruptCtx, cancel := cmdutils.NewInterruptContext() + defer cancel() + + if err := worker.StartBlocking(interruptCtx); err != nil { + log.Fatalf("failed to start worker: %v", err) + } +} diff --git a/examples/python/guides/ai_agents/llm_service.py b/examples/python/guides/ai_agents/llm_service.py new file mode 100644 index 0000000000..27930d3008 --- /dev/null +++ b/examples/python/guides/ai_agents/llm_service.py @@ -0,0 +1,49 @@ +"""Encapsulated LLM service - swap MockLLMService for OpenAI/Anthropic in production. + +See docs: /guides/ai-agents +""" + +from abc import ABC, abstractmethod + + +class LLMService(ABC): + """Interface for LLM completion. Implement with OpenAI, Anthropic, etc.""" + + @abstractmethod + def complete(self, messages: list[dict]) -> dict: + """Complete a chat. Returns {content, tool_calls, done}.""" + pass + + +class MockLLMService(LLMService): + """No external API - for local development and tests.""" + + def __init__(self) -> None: + self._call_count: dict[str, int] = {} + + def complete(self, messages: list[dict]) -> dict: + key = "default" + self._call_count[key] = self._call_count.get(key, 0) + 1 + if self._call_count[key] == 1: + return { + "content": "", + "tool_calls": [{"name": "get_weather", "args": {"location": "SF"}}], + "done": False, + } + return {"content": "It's 72°F and sunny in SF.", "tool_calls": [], "done": True} + + +# Default: mock. Override with getenv or DI for production. 
+_llm_service: LLMService | None = None + + +def get_llm_service() -> LLMService: + global _llm_service + if _llm_service is None: + _llm_service = MockLLMService() + return _llm_service + + +def set_llm_service(service: LLMService) -> None: + global _llm_service + _llm_service = service diff --git a/examples/python/guides/ai_agents/mock_agent.py b/examples/python/guides/ai_agents/mock_agent.py new file mode 100644 index 0000000000..9453dd5bd2 --- /dev/null +++ b/examples/python/guides/ai_agents/mock_agent.py @@ -0,0 +1,24 @@ +"""Mock LLM and tools - no external API dependencies.""" + +_call_count: dict[str, int] = {} + + +def call_llm(messages: list[dict]) -> dict: + """Mock LLM: first call returns tool_calls, second returns final answer.""" + key = "default" + _call_count[key] = _call_count.get(key, 0) + 1 + if _call_count[key] == 1: + return { + "content": "", + "tool_calls": [{"name": "get_weather", "args": {"location": "SF"}}], + "done": False, + } + return {"content": "It's 72°F and sunny in SF.", "tool_calls": [], "done": True} + + +def run_tool(name: str, args: dict) -> str: + """Mock tool execution - returns canned results.""" + if name == "get_weather": + loc = args.get("location", "unknown") + return f"Weather in {loc}: 72°F, sunny" + return f"Unknown tool: {name}" diff --git a/examples/python/guides/ai_agents/tool_service.py b/examples/python/guides/ai_agents/tool_service.py new file mode 100644 index 0000000000..5b228d09dc --- /dev/null +++ b/examples/python/guides/ai_agents/tool_service.py @@ -0,0 +1,40 @@ +"""Encapsulated tool execution - swap MockToolService for real APIs in production. + +See docs: /guides/ai-agents +""" + +from abc import ABC, abstractmethod + + +class ToolService(ABC): + """Interface for agent tool execution. Implement with your APIs.""" + + @abstractmethod + def run(self, name: str, args: dict) -> str: + """Execute a tool. 
Returns string result.""" + pass + + +class MockToolService(ToolService): + """No external API - returns canned results for demos.""" + + def run(self, name: str, args: dict) -> str: + if name == "get_weather": + loc = args.get("location", "unknown") + return f"Weather in {loc}: 72°F, sunny" + return f"Unknown tool: {name}" + + +_tool_service: ToolService | None = None + + +def get_tool_service() -> ToolService: + global _tool_service + if _tool_service is None: + _tool_service = MockToolService() + return _tool_service + + +def set_tool_service(service: ToolService) -> None: + global _tool_service + _tool_service = service diff --git a/examples/python/guides/ai_agents/worker.py b/examples/python/guides/ai_agents/worker.py new file mode 100644 index 0000000000..1731ab0aee --- /dev/null +++ b/examples/python/guides/ai_agents/worker.py @@ -0,0 +1,83 @@ +from hatchet_sdk import ( + ConcurrencyExpression, + ConcurrencyLimitStrategy, + DurableContext, + EmptyModel, + Hatchet, +) + +try: + from .llm_service import get_llm_service + from .tool_service import get_tool_service +except ImportError: + from llm_service import get_llm_service + from tool_service import get_tool_service + +hatchet = Hatchet(debug=True) + + +# > Step 01 Define Agent Task +@hatchet.durable_task( + name="ReasoningLoopAgent", + concurrency=ConcurrencyExpression( + expression="input.session_id != null ? string(input.session_id) : 'constant'", + max_runs=1, + limit_strategy=ConcurrencyLimitStrategy.CANCEL_IN_PROGRESS, + ), +) +async def agent_task(input: EmptyModel, ctx: DurableContext) -> dict: + """Agent loop: reason, act, observe. 
Streams output, survives restarts.""" + query = "Hello" + if isinstance(input, dict) and input.get("query"): + query = str(input["query"]) + elif hasattr(input, "query") and input.query: + query = str(input.query) + return await agent_reasoning_loop(query) + + +# > Step 02 Reasoning Loop +async def agent_reasoning_loop(query: str) -> dict: + llm = get_llm_service() + tools = get_tool_service() + messages = [{"role": "user", "content": query}] + for _ in range(10): + resp = llm.complete(messages) + if resp.get("done"): + return {"response": resp["content"]} + for tc in resp.get("tool_calls", []): + result = tools.run(tc["name"], tc.get("args", {})) + messages.append({"role": "tool", "content": result}) + return {"response": "Max iterations reached"} + + +# > Step 03 Stream Response +@hatchet.durable_task( + name="StreamingAgentTask", + concurrency=ConcurrencyExpression( + expression="input.session_id != null ? string(input.session_id) : 'constant'", + max_runs=1, + limit_strategy=ConcurrencyLimitStrategy.CANCEL_IN_PROGRESS, + ), +) +async def streaming_agent(input: EmptyModel, ctx: DurableContext) -> dict: + """Stream tokens to the client as they're generated.""" + tokens = ["Hello", " ", "world", "!"] + for t in tokens: + await ctx.aio_put_stream(t) + return {"done": True} + + + + +def main() -> None: + # > Step 04 Run Worker + worker = hatchet.worker( + "agent-worker", + workflows=[agent_task, streaming_agent], + slots=5, + ) + worker.start() + + +if __name__ == "__main__": + main() diff --git a/examples/python/guides/batch_processing/worker.py b/examples/python/guides/batch_processing/worker.py new file mode 100644 index 0000000000..45c1b2ca10 --- /dev/null +++ b/examples/python/guides/batch_processing/worker.py @@ -0,0 +1,53 @@ +from typing import Any + +from hatchet_sdk import Context, Hatchet +from pydantic import BaseModel + +hatchet = Hatchet(debug=True) + + +# > Step 01 Define Parent Task +class BatchInput(BaseModel): + items: list[str] + + +class 
ItemInput(BaseModel): + item_id: str + + +parent_wf = hatchet.workflow(name="BatchParent", input_validator=BatchInput) +child_wf = hatchet.workflow(name="BatchChild", input_validator=ItemInput) + + +@parent_wf.durable_task() +async def spawn_children(input: BatchInput, ctx: Context) -> dict[str, Any]: + """Parent fans out to one child per item.""" + results = await child_wf.aio_run_many( + [child_wf.create_bulk_run_item(input=ItemInput(item_id=item_id)) for item_id in input.items] + ) + return {"processed": len(results), "results": results} + + + + +# > Step 03 Process Item +@child_wf.task() +async def process_item(input: ItemInput, ctx: Context) -> dict[str, str]: + """Child processes a single item.""" + return {"status": "done", "item_id": input.item_id} + + + + +def main() -> None: + # > Step 04 Run Worker + worker = hatchet.worker( + "batch-worker", + slots=20, + workflows=[parent_wf, child_wf], + ) + worker.start() + + +if __name__ == "__main__": + main() diff --git a/examples/python/guides/document_processing/llm_extract_service.py b/examples/python/guides/document_processing/llm_extract_service.py new file mode 100644 index 0000000000..218ce5e3b4 --- /dev/null +++ b/examples/python/guides/document_processing/llm_extract_service.py @@ -0,0 +1,37 @@ +"""Encapsulated LLM extraction - swap MockExtractService for OpenAI/Anthropic in production. + +See docs: /guides/document-processing +""" + +from abc import ABC, abstractmethod + + +class ExtractService(ABC): + """Interface for entity extraction from text. 
Implement with LLM or rules.""" + + @abstractmethod + def extract(self, text: str) -> list[str]: + """Extract entities from parsed text.""" + pass + + +class MockExtractService(ExtractService): + """No external API - returns placeholder entities for demos.""" + + def extract(self, text: str) -> list[str]: + return ["entity1", "entity2"] + + +_extract_service: ExtractService | None = None + + +def get_extract_service() -> ExtractService: + global _extract_service + if _extract_service is None: + _extract_service = MockExtractService() + return _extract_service + + +def set_extract_service(service: ExtractService) -> None: + global _extract_service + _extract_service = service diff --git a/examples/python/guides/document_processing/mock_ocr.py b/examples/python/guides/document_processing/mock_ocr.py new file mode 100644 index 0000000000..992157aadd --- /dev/null +++ b/examples/python/guides/document_processing/mock_ocr.py @@ -0,0 +1,6 @@ +"""Mock OCR/parser - no external dependencies.""" + + +def parse_document(content: bytes) -> str: + """Mock: return placeholder text instead of real OCR.""" + return f"Parsed text from {len(content)} bytes" diff --git a/examples/python/guides/document_processing/ocr_service.py b/examples/python/guides/document_processing/ocr_service.py new file mode 100644 index 0000000000..ace102dbd7 --- /dev/null +++ b/examples/python/guides/document_processing/ocr_service.py @@ -0,0 +1,37 @@ +"""Encapsulated OCR service - swap MockOCRService for Tesseract/Google Vision in production. + +See docs: /guides/document-processing +""" + +from abc import ABC, abstractmethod + + +class OCRService(ABC): + """Interface for document parsing. 
Implement with Tesseract, Google Vision, etc.""" + + @abstractmethod + def parse(self, content: bytes) -> str: + """Convert raw bytes (image/PDF) to text.""" + pass + + +class MockOCRService(OCRService): + """No external API - returns placeholder for demos.""" + + def parse(self, content: bytes) -> str: + return f"Parsed text from {len(content)} bytes" + + +_ocr_service: OCRService | None = None + + +def get_ocr_service() -> OCRService: + global _ocr_service + if _ocr_service is None: + _ocr_service = MockOCRService() + return _ocr_service + + +def set_ocr_service(service: OCRService) -> None: + global _ocr_service + _ocr_service = service diff --git a/examples/python/guides/document_processing/worker.py b/examples/python/guides/document_processing/worker.py new file mode 100644 index 0000000000..ab5657def5 --- /dev/null +++ b/examples/python/guides/document_processing/worker.py @@ -0,0 +1,62 @@ +from typing import Any + +from hatchet_sdk import Context, Hatchet +from pydantic import BaseModel + +try: + from .llm_extract_service import get_extract_service + from .ocr_service import get_ocr_service +except ImportError: + from llm_extract_service import get_extract_service + from ocr_service import get_ocr_service + +hatchet = Hatchet(debug=True) + + +# > Step 01 Define DAG +class DocInput(BaseModel): + doc_id: str + content: bytes = b"" + + +doc_wf = hatchet.workflow(name="DocumentPipeline", input_validator=DocInput) + + +@doc_wf.task() +async def ingest(input: DocInput, ctx: Context) -> dict[str, Any]: + return {"doc_id": input.doc_id, "content": input.content} + + + + +# > Step 02 Parse Stage +@doc_wf.task(parents=[ingest]) +async def parse(input: DocInput, ctx: Context) -> dict[str, Any]: + ingested = ctx.task_output(ingest) + text = get_ocr_service().parse(ingested["content"]) + return {"doc_id": input.doc_id, "text": text} + + + + +# > Step 03 Extract Stage +@doc_wf.task(parents=[parse]) +async def extract(input: DocInput, ctx: Context) -> dict[str, Any]: + 
parsed = ctx.task_output(parse) + entities = get_extract_service().extract(parsed["text"]) + return {"doc_id": parsed["doc_id"], "entities": entities} + + + + +def main() -> None: + # > Step 04 Run Worker + worker = hatchet.worker( + "document-worker", + workflows=[doc_wf], + ) + worker.start() + + +if __name__ == "__main__": + main() diff --git a/examples/python/guides/evaluator_optimizer/mock_llm.py b/examples/python/guides/evaluator_optimizer/mock_llm.py new file mode 100644 index 0000000000..236ce6e5fb --- /dev/null +++ b/examples/python/guides/evaluator_optimizer/mock_llm.py @@ -0,0 +1,17 @@ +"""Mock LLM for evaluator-optimizer - no external API dependencies.""" + +_generate_count = 0 + + +def mock_generate(prompt: str) -> str: + global _generate_count + _generate_count += 1 + if _generate_count == 1: + return "Check out our product! Buy now!" + return "Discover how our tool saves teams 10 hours/week. Try it free." + + +def mock_evaluate(draft: str) -> dict: + if len(draft) < 40: + return {"score": 0.4, "feedback": "Too short and pushy. 
Add a specific benefit and soften the CTA."} + return {"score": 0.9, "feedback": "Clear value prop, appropriate tone."} diff --git a/examples/python/guides/evaluator_optimizer/worker.py b/examples/python/guides/evaluator_optimizer/worker.py new file mode 100644 index 0000000000..7933f88ad2 --- /dev/null +++ b/examples/python/guides/evaluator_optimizer/worker.py @@ -0,0 +1,71 @@ +from hatchet_sdk import Context, DurableContext, EmptyModel, Hatchet + +try: + from .mock_llm import mock_evaluate, mock_generate +except ImportError: + from mock_llm import mock_evaluate, mock_generate + +hatchet = Hatchet(debug=True) + +generator_wf = hatchet.workflow(name="GenerateDraft") +evaluator_wf = hatchet.workflow(name="EvaluateDraft") + + +# > Step 01 Define Tasks +@generator_wf.task() +async def generate_draft(input: dict, ctx: Context) -> dict: + prompt = ( + f"Improve this draft.\n\nDraft: {input['previous_draft']}\nFeedback: {input['feedback']}" + if input.get("feedback") + else f"Write a social media post about \"{input['topic']}\" for {input['audience']}. Under 100 words." 
+ ) + return {"draft": mock_generate(prompt)} + + +@evaluator_wf.task() +async def evaluate_draft(input: dict, ctx: Context) -> dict: + return mock_evaluate(input["draft"]) + + +# > Step 02 Optimization Loop +@hatchet.durable_task(name="EvaluatorOptimizer", execution_timeout="5m") +async def evaluator_optimizer(input: EmptyModel, ctx: DurableContext) -> dict: + max_iterations = 3 + threshold = 0.8 + draft = "" + feedback = "" + + for i in range(max_iterations): + generated = await generator_wf.aio_run( + input={ + "topic": input["topic"], + "audience": input["audience"], + "previous_draft": draft or None, + "feedback": feedback or None, + } + ) + draft = generated["draft"] + + evaluation = await evaluator_wf.aio_run( + input={"draft": draft, "topic": input["topic"], "audience": input["audience"]} + ) + + if evaluation["score"] >= threshold: + return {"draft": draft, "iterations": i + 1, "score": evaluation["score"]} + feedback = evaluation["feedback"] + + return {"draft": draft, "iterations": max_iterations, "score": -1} + + +def main() -> None: + # > Step 03 Run Worker + worker = hatchet.worker( + "evaluator-optimizer-worker", + workflows=[generator_wf, evaluator_wf, evaluator_optimizer], + slots=5, + ) + worker.start() + + +if __name__ == "__main__": + main() diff --git a/examples/python/guides/event_driven/trigger.py b/examples/python/guides/event_driven/trigger.py new file mode 100644 index 0000000000..0161e45ce9 --- /dev/null +++ b/examples/python/guides/event_driven/trigger.py @@ -0,0 +1,11 @@ +from hatchet_sdk import Hatchet + +hatchet = Hatchet(debug=True) + + +# > Step 03 Push Event +# Push an event to trigger the workflow. Use the same key as on_events. 
+hatchet.event.push( + "order:created", + {"message": "Order #1234", "source": "webhook"}, +) diff --git a/examples/python/guides/event_driven/worker.py b/examples/python/guides/event_driven/worker.py new file mode 100644 index 0000000000..d7a974eb93 --- /dev/null +++ b/examples/python/guides/event_driven/worker.py @@ -0,0 +1,43 @@ +from hatchet_sdk import Context, Hatchet +from pydantic import BaseModel + +hatchet = Hatchet(debug=True) + + +# > Step 01 Define Event Task +class EventInput(BaseModel): + message: str + source: str = "api" + + +event_wf = hatchet.workflow( + name="EventDrivenWorkflow", + input_validator=EventInput, + on_events=["order:created", "user:signup"], +) + + +@event_wf.task() +async def process_event(input: EventInput, ctx: Context) -> dict: + return {"processed": input.message, "source": input.source} + + + + +# > Step 02 Register Event Trigger +def push_order_event(): + """Push an event to trigger the workflow. Use the same key as on_events.""" + hatchet.event.push("order:created", {"message": "Order #1234", "source": "webhook"}) + + +def main() -> None: + # > Step 04 Run Worker + worker = hatchet.worker( + "event-driven-worker", + workflows=[event_wf], + ) + worker.start() + + +if __name__ == "__main__": + main() diff --git a/examples/python/guides/human_in_the_loop/trigger.py b/examples/python/guides/human_in_the_loop/trigger.py new file mode 100644 index 0000000000..4d4709667b --- /dev/null +++ b/examples/python/guides/human_in_the_loop/trigger.py @@ -0,0 +1,16 @@ +from hatchet_sdk import Hatchet + +hatchet = Hatchet(debug=True) + + +# > Step 03 Push Approval Event +# Include the run_id so the event matches the specific task waiting for it. 
+def push_approval(run_id: str, approved: bool, reason: str = "") -> None: + hatchet.event.push( + "approval:decision", + {"runId": run_id, "approved": approved, "reason": reason}, + ) + + +# Approve: push_approval("run-id-from-ui", True) +# Reject: push_approval("run-id-from-ui", False, reason="needs review") diff --git a/examples/python/guides/human_in_the_loop/worker.py b/examples/python/guides/human_in_the_loop/worker.py new file mode 100644 index 0000000000..55bf6ea428 --- /dev/null +++ b/examples/python/guides/human_in_the_loop/worker.py @@ -0,0 +1,41 @@ +from hatchet_sdk import DurableContext, EmptyModel, Hatchet, UserEventCondition + +hatchet = Hatchet(debug=True) + +APPROVAL_EVENT_KEY = "approval:decision" + + +# > Step 02 Wait For Event +async def wait_for_approval(ctx: DurableContext) -> dict: + run_id = ctx.workflow_run_id + approval = await ctx.aio_wait_for( + "approval", + UserEventCondition( + event_key=APPROVAL_EVENT_KEY, + expression=f"input.runId == '{run_id}'", + ), + ) + return approval + + +# > Step 01 Define Approval Task +@hatchet.durable_task(name="ApprovalTask") +async def approval_task(input: EmptyModel, ctx: DurableContext) -> dict: + proposed_action = {"action": "send_email", "to": "user@example.com"} + approval = await wait_for_approval(ctx) + if approval.get("approved"): + return {"status": "approved", "action": proposed_action} + return {"status": "rejected", "reason": approval.get("reason", "")} + + +def main() -> None: + # > Step 04 Run Worker + worker = hatchet.worker( + "human-in-the-loop-worker", + workflows=[approval_task], + ) + worker.start() + + +if __name__ == "__main__": + main() diff --git a/examples/python/guides/integrations/embedding_cohere.py b/examples/python/guides/integrations/embedding_cohere.py new file mode 100644 index 0000000000..77c3c35e64 --- /dev/null +++ b/examples/python/guides/integrations/embedding_cohere.py @@ -0,0 +1,12 @@ +# Third-party integration example - requires: pip install cohere +# See: 
/guides/rag-and-indexing + +import cohere + +client = cohere.Client() + + +# > Cohere embedding usage +def embed(text: str) -> list[float]: + r = client.embed(texts=[text], model="embed-english-v3.0", input_type="search_document") + return list(r.embeddings[0]) diff --git a/examples/python/guides/integrations/embedding_openai.py b/examples/python/guides/integrations/embedding_openai.py new file mode 100644 index 0000000000..0ec6179154 --- /dev/null +++ b/examples/python/guides/integrations/embedding_openai.py @@ -0,0 +1,12 @@ +# Third-party integration example - requires: pip install openai +# See: /guides/rag-and-indexing + +from openai import OpenAI + +client = OpenAI() + + +# > OpenAI embedding usage +def embed(text: str) -> list[float]: + r = client.embeddings.create(model="text-embedding-3-small", input=text) + return r.data[0].embedding diff --git a/examples/python/guides/integrations/llm_anthropic.py b/examples/python/guides/integrations/llm_anthropic.py new file mode 100644 index 0000000000..98b2a5e765 --- /dev/null +++ b/examples/python/guides/integrations/llm_anthropic.py @@ -0,0 +1,21 @@ +# Third-party integration example - requires: pip install anthropic +# See: /guides/ai-agents + +from anthropic import Anthropic + +client = Anthropic() + + +# > Anthropic usage +def complete(messages: list[dict]) -> dict: + resp = client.messages.create( + model="claude-3-5-haiku-20241022", + max_tokens=1024, + messages=[{"role": m["role"], "content": m["content"]} for m in messages], + tools=[{"name": "get_weather", "description": "Get weather", "input_schema": {"type": "object", "properties": {"location": {"type": "string"}}}}], + ) + for block in resp.content: + if block.type == "tool_use": + return {"content": "", "tool_calls": [{"name": block.name, "args": block.input}], "done": False} + text = "".join(b.text for b in resp.content if hasattr(b, "text")) + return {"content": text, "tool_calls": [], "done": True} diff --git 
a/examples/python/guides/integrations/llm_groq.py b/examples/python/guides/integrations/llm_groq.py new file mode 100644 index 0000000000..6db7285cbb --- /dev/null +++ b/examples/python/guides/integrations/llm_groq.py @@ -0,0 +1,37 @@ +# Third-party integration - requires: pip install groq +# See: /guides/ai-agents + +import json + +from groq import Groq + +client = Groq() + + +# > Groq usage +def complete(messages: list[dict]) -> dict: + r = client.chat.completions.create( + model="llama-3.3-70b-versatile", + messages=messages, + tool_choice="auto", + tools=[ + { + "type": "function", + "function": { + "name": "get_weather", + "description": "Get weather for a location", + "parameters": { + "type": "object", + "properties": {"location": {"type": "string"}}, + "required": ["location"], + }, + }, + } + ], + ) + msg = r.choices[0].message + tool_calls = [ + {"name": tc.function.name, "args": json.loads(tc.function.arguments or "{}")} + for tc in (msg.tool_calls or []) + ] + return {"content": msg.content or "", "tool_calls": tool_calls, "done": not tool_calls} diff --git a/examples/python/guides/integrations/llm_ollama.py b/examples/python/guides/integrations/llm_ollama.py new file mode 100644 index 0000000000..092976a5cb --- /dev/null +++ b/examples/python/guides/integrations/llm_ollama.py @@ -0,0 +1,12 @@ +# Third-party integration example - requires: pip install ollama; ollama run llama2 +# See: /guides/ai-agents + +import ollama + + +# > Ollama usage +def complete(messages: list[dict]) -> dict: + resp = ollama.chat(model="llama2", messages=messages) + content = resp.get("message", {}).get("content", "") + tool_calls = resp.get("message", {}).get("tool_calls") or [] + return {"content": content, "tool_calls": tool_calls, "done": not tool_calls} diff --git a/examples/python/guides/integrations/llm_openai.py b/examples/python/guides/integrations/llm_openai.py new file mode 100644 index 0000000000..b7be7a8d23 --- /dev/null +++ 
b/examples/python/guides/integrations/llm_openai.py @@ -0,0 +1,37 @@ +# Third-party integration example - requires: pip install openai +# See: /guides/ai-agents + +import json + +from openai import OpenAI + +client = OpenAI() + + +# > OpenAI usage +def complete(messages: list[dict]) -> dict: + r = client.chat.completions.create( + model="gpt-4o-mini", + messages=messages, + tool_choice="auto", + tools=[ + { + "type": "function", + "function": { + "name": "get_weather", + "description": "Get weather for a location", + "parameters": { + "type": "object", + "properties": {"location": {"type": "string"}}, + "required": ["location"], + }, + }, + } + ], + ) + msg = r.choices[0].message + tool_calls = [ + {"name": tc.function.name, "args": json.loads(tc.function.arguments or "{}")} + for tc in (msg.tool_calls or []) + ] + return {"content": msg.content or "", "tool_calls": tool_calls, "done": not tool_calls} diff --git a/examples/python/guides/integrations/ocr_google_vision.py b/examples/python/guides/integrations/ocr_google_vision.py new file mode 100644 index 0000000000..c10dce4fa9 --- /dev/null +++ b/examples/python/guides/integrations/ocr_google_vision.py @@ -0,0 +1,13 @@ +# Third-party integration example - requires: pip install google-cloud-vision +# See: /guides/document-processing + +from google.cloud import vision + +client = vision.ImageAnnotatorClient() + + +# > Google Vision usage +def parse_document(content: bytes) -> str: + image = vision.Image(content=content) + response = client.document_text_detection(image=image) + return response.full_text_annotation.text if response.full_text_annotation else "" diff --git a/examples/python/guides/integrations/ocr_reducto.py b/examples/python/guides/integrations/ocr_reducto.py new file mode 100644 index 0000000000..211d3ad12d --- /dev/null +++ b/examples/python/guides/integrations/ocr_reducto.py @@ -0,0 +1,14 @@ +# Third-party integration - requires: pip install reductoai +# See: /guides/document-processing +# Reducto: 
parse PDFs/images to structured content, extract with schema/prompt + +from reducto import Reducto + +client = Reducto() + + +# > Reducto usage +def parse_document(content: bytes) -> str: + upload = client.upload.upload(file=content, extension=".pdf") + result = client.parse.parse(input=upload.url) + return str(result) # or access result.blocks, result.tables, etc. diff --git a/examples/python/guides/integrations/ocr_tesseract.py b/examples/python/guides/integrations/ocr_tesseract.py new file mode 100644 index 0000000000..8c8c4621a3 --- /dev/null +++ b/examples/python/guides/integrations/ocr_tesseract.py @@ -0,0 +1,13 @@ +# Third-party integration example - requires: pip install pytesseract; install Tesseract binary +# See: /guides/document-processing + +import io + +import pytesseract +from PIL import Image + + +# > Tesseract usage +def parse_document(content: bytes) -> str: + img = Image.open(io.BytesIO(content)) + return pytesseract.image_to_string(img) diff --git a/examples/python/guides/integrations/ocr_unstructured.py b/examples/python/guides/integrations/ocr_unstructured.py new file mode 100644 index 0000000000..4284cbe38c --- /dev/null +++ b/examples/python/guides/integrations/ocr_unstructured.py @@ -0,0 +1,11 @@ +# Third-party integration - requires: pip install "unstructured[pdf]" +# See: /guides/document-processing +# Unstructured: open-source doc parsing for RAG, supports PDF, DOCX, images, etc. 
+ +import io +from unstructured.partition.auto import partition + +# > Unstructured usage +def parse_document(content: bytes) -> str: + elements = partition(file=io.BytesIO(content)) + return "\n\n".join(str(el) for el in elements) diff --git a/examples/python/guides/integrations/scraper_browserbase.py b/examples/python/guides/integrations/scraper_browserbase.py new file mode 100644 index 0000000000..1128b9cc16 --- /dev/null +++ b/examples/python/guides/integrations/scraper_browserbase.py @@ -0,0 +1,21 @@ +# Third-party integration example - requires: pip install playwright browserbase +# See: /guides/web-scraping + +import os + +from browserbase import Browserbase +from playwright.async_api import async_playwright + +bb = Browserbase(api_key=os.environ["BROWSERBASE_API_KEY"]) + + +# > Browserbase usage +async def scrape_url(url: str) -> dict: + session = bb.sessions.create(project_id=os.environ["BROWSERBASE_PROJECT_ID"]) + async with async_playwright() as pw: + browser = await pw.chromium.connect_over_cdp(session.connect_url) + page = browser.contexts[0].pages[0] + await page.goto(url) + content = await page.content() + await browser.close() + return {"url": url, "content": content} diff --git a/examples/python/guides/integrations/scraper_firecrawl.py b/examples/python/guides/integrations/scraper_firecrawl.py new file mode 100644 index 0000000000..a47fa7e117 --- /dev/null +++ b/examples/python/guides/integrations/scraper_firecrawl.py @@ -0,0 +1,18 @@ +# Third-party integration example - requires: pip install firecrawl-py +# See: /guides/web-scraping + +import os + +from firecrawl import FirecrawlApp + +firecrawl = FirecrawlApp(api_key=os.environ["FIRECRAWL_API_KEY"]) + + +# > Firecrawl usage +def scrape_url(url: str) -> dict: + result = firecrawl.scrape_url(url, params={"formats": ["markdown"]}) + return { + "url": url, + "content": result["markdown"], + "metadata": result.get("metadata", {}), + } diff --git a/examples/python/guides/integrations/scraper_openai.py 
b/examples/python/guides/integrations/scraper_openai.py new file mode 100644 index 0000000000..c512c4eb6f --- /dev/null +++ b/examples/python/guides/integrations/scraper_openai.py @@ -0,0 +1,16 @@ +# Third-party integration example - requires: pip install openai +# See: /guides/web-scraping + +from openai import OpenAI + +client = OpenAI() + + +# > OpenAI web search usage +def search_and_extract(query: str) -> dict: + response = client.responses.create( + model="gpt-4o-mini", + tools=[{"type": "web_search"}], + input=query, + ) + return {"query": query, "content": response.output_text} diff --git a/examples/python/guides/integrations/scraper_playwright.py b/examples/python/guides/integrations/scraper_playwright.py new file mode 100644 index 0000000000..2f14702777 --- /dev/null +++ b/examples/python/guides/integrations/scraper_playwright.py @@ -0,0 +1,15 @@ +# Third-party integration example - requires: pip install playwright && playwright install +# See: /guides/web-scraping + +from playwright.async_api import async_playwright + + +# > Playwright usage +async def scrape_url(url: str) -> dict: + async with async_playwright() as pw: + browser = await pw.chromium.launch(headless=True) + page = await browser.new_page() + await page.goto(url) + content = await page.content() + await browser.close() + return {"url": url, "content": content} diff --git a/examples/python/guides/llm_pipelines/llm_service.py b/examples/python/guides/llm_pipelines/llm_service.py new file mode 100644 index 0000000000..e619e264ec --- /dev/null +++ b/examples/python/guides/llm_pipelines/llm_service.py @@ -0,0 +1,37 @@ +"""Encapsulated LLM service - swap MockLLMService for OpenAI/Anthropic in production. + +See docs: /guides/llm-pipelines +""" + +from abc import ABC, abstractmethod + + +class LLMService(ABC): + """Interface for LLM generation. Implement with OpenAI, Anthropic, etc.""" + + @abstractmethod + def generate(self, prompt: str) -> dict: + """Generate from prompt. 
Returns {content, valid}.""" + pass + + +class MockLLMService(LLMService): + """No external API - for demos.""" + + def generate(self, prompt: str) -> dict: + return {"content": f"Generated for: {prompt[:50]}...", "valid": True} + + +_llm_service: LLMService | None = None + + +def get_llm_service() -> LLMService: + global _llm_service + if _llm_service is None: + _llm_service = MockLLMService() + return _llm_service + + +def set_llm_service(service: LLMService) -> None: + global _llm_service + _llm_service = service diff --git a/examples/python/guides/llm_pipelines/mock_llm.py b/examples/python/guides/llm_pipelines/mock_llm.py new file mode 100644 index 0000000000..dbcf31b808 --- /dev/null +++ b/examples/python/guides/llm_pipelines/mock_llm.py @@ -0,0 +1,11 @@ +"""Mock LLM client - no external API dependencies.""" + + +def generate(prompt: str) -> dict: + """Mock: return placeholder instead of calling real LLM.""" + return {"content": f"Generated for: {prompt[:50]}...", "valid": True} + + +def validate(output: dict) -> bool: + """Mock: always valid.""" + return output.get("valid", False) diff --git a/examples/python/guides/llm_pipelines/worker.py b/examples/python/guides/llm_pipelines/worker.py new file mode 100644 index 0000000000..8ca16f2da0 --- /dev/null +++ b/examples/python/guides/llm_pipelines/worker.py @@ -0,0 +1,54 @@ +from hatchet_sdk import Context, Hatchet +from pydantic import BaseModel + +try: + from .llm_service import get_llm_service +except ImportError: + from llm_service import get_llm_service + +hatchet = Hatchet(debug=True) + + +# > Step 01 Define Pipeline +class PipelineInput(BaseModel): + prompt: str + + +llm_wf = hatchet.workflow(name="LLMPipeline", input_validator=PipelineInput) + + +@llm_wf.task() +async def prompt_task(input: PipelineInput, ctx: Context) -> dict: + return {"prompt": input.prompt} + + + + +# > Step 02 Prompt Task +def _build_prompt(user_input: str, context: str = "") -> str: + return f"Process the following: {user_input}" + 
(f"\nContext: {context}" if context else "") + + +# > Step 03 Validate Task +@llm_wf.task(parents=[prompt_task]) +async def generate_task(input: PipelineInput, ctx: Context) -> dict: + prev = ctx.task_output(prompt_task) + output = get_llm_service().generate(prev["prompt"]) + if not output.get("valid"): + raise ValueError("Validation failed") + return output + + + + +def main() -> None: + # > Step 04 Run Worker + worker = hatchet.worker( + "llm-pipeline-worker", + workflows=[llm_wf], + ) + worker.start() + + +if __name__ == "__main__": + main() diff --git a/examples/python/guides/multi_agent/mock_llm.py b/examples/python/guides/multi_agent/mock_llm.py new file mode 100644 index 0000000000..6f948889e8 --- /dev/null +++ b/examples/python/guides/multi_agent/mock_llm.py @@ -0,0 +1,17 @@ +"""Mock LLM for multi-agent orchestration - no external API dependencies.""" + +_orchestrator_call_count = 0 + + +def mock_orchestrator_llm(messages: list[dict]) -> dict: + global _orchestrator_call_count + _orchestrator_call_count += 1 + if _orchestrator_call_count == 1: + return {"done": False, "content": "", "tool_call": {"name": "research", "args": {"task": "Find key facts about the topic"}}} + if _orchestrator_call_count == 2: + return {"done": False, "content": "", "tool_call": {"name": "writing", "args": {"task": "Write a summary from the research"}}} + return {"done": True, "content": "Here is the final report combining research and writing."} + + +def mock_specialist_llm(task: str, role: str) -> str: + return f"[{role}] Completed: {task}" diff --git a/examples/python/guides/multi_agent/worker.py b/examples/python/guides/multi_agent/worker.py new file mode 100644 index 0000000000..d006894c11 --- /dev/null +++ b/examples/python/guides/multi_agent/worker.py @@ -0,0 +1,71 @@ +from hatchet_sdk import DurableContext, EmptyModel, Hatchet + +try: + from .mock_llm import mock_orchestrator_llm, mock_specialist_llm +except ImportError: + from mock_llm import mock_orchestrator_llm, 
mock_specialist_llm + +hatchet = Hatchet(debug=True) + + +# > Step 01 Specialist Agents +@hatchet.durable_task(name="ResearchSpecialist", execution_timeout="3m") +async def research(input: EmptyModel, ctx: DurableContext) -> dict: + return {"result": mock_specialist_llm(input["task"], "research")} + + +@hatchet.durable_task(name="WritingSpecialist", execution_timeout="2m") +async def write(input: EmptyModel, ctx: DurableContext) -> dict: + return {"result": mock_specialist_llm(input["task"], "writing")} + + +@hatchet.durable_task(name="CodeSpecialist", execution_timeout="2m") +async def code(input: EmptyModel, ctx: DurableContext) -> dict: + return {"result": mock_specialist_llm(input["task"], "code")} + + +specialists = { + "research": research, + "writing": write, + "code": code, +} + + +# > Step 02 Orchestrator Loop +@hatchet.durable_task(name="MultiAgentOrchestrator", execution_timeout="15m") +async def orchestrator(input: EmptyModel, ctx: DurableContext) -> dict: + messages = [{"role": "user", "content": input["goal"]}] + + for _ in range(10): + response = mock_orchestrator_llm(messages) + + if response["done"]: + return {"result": response["content"]} + + specialist = specialists.get(response["tool_call"]["name"]) + if not specialist: + raise ValueError(f"Unknown specialist: {response['tool_call']['name']}") + + result = await specialist.aio_run(input={ + "task": response["tool_call"]["args"]["task"], + "context": "\n".join(m["content"] for m in messages), + }) + + messages.append({"role": "assistant", "content": f"Called {response['tool_call']['name']}"}) + messages.append({"role": "tool", "content": result["result"]}) + + return {"result": "Max iterations reached"} + + +def main() -> None: + # > Step 03 Run Worker + worker = hatchet.worker( + "multi-agent-worker", + workflows=[research, write, code, orchestrator], + slots=10, + ) + worker.start() + + +if __name__ == "__main__": + main() diff --git a/examples/python/guides/parallelization/mock_llm.py 
b/examples/python/guides/parallelization/mock_llm.py new file mode 100644 index 0000000000..b1ad21555f --- /dev/null +++ b/examples/python/guides/parallelization/mock_llm.py @@ -0,0 +1,16 @@ +"""Mock LLM for parallelization - no external API dependencies.""" + + +def mock_generate_content(message: str) -> str: + return f"Here is a helpful response to: {message}" + + +def mock_safety_check(message: str) -> dict: + if "unsafe" in message.lower(): + return {"safe": False, "reason": "Content flagged as potentially unsafe."} + return {"safe": True, "reason": "Content is appropriate."} + + +def mock_evaluate(content: str) -> dict: + score = 0.85 if len(content) > 20 else 0.3 + return {"score": score, "approved": score >= 0.7} diff --git a/examples/python/guides/parallelization/worker.py b/examples/python/guides/parallelization/worker.py new file mode 100644 index 0000000000..3a1270c3c3 --- /dev/null +++ b/examples/python/guides/parallelization/worker.py @@ -0,0 +1,72 @@ +import asyncio + +from hatchet_sdk import Context, DurableContext, EmptyModel, Hatchet + +try: + from .mock_llm import mock_evaluate, mock_generate_content, mock_safety_check +except ImportError: + from mock_llm import mock_evaluate, mock_generate_content, mock_safety_check + +hatchet = Hatchet(debug=True) + +content_wf = hatchet.workflow(name="GenerateContent") +safety_wf = hatchet.workflow(name="SafetyCheck") +evaluator_wf = hatchet.workflow(name="EvaluateContent") + + +# > Step 01 Parallel Tasks +@content_wf.task() +async def generate_content(input: dict, ctx: Context) -> dict: + return {"content": mock_generate_content(input["message"])} + + +@safety_wf.task() +async def safety_check(input: dict, ctx: Context) -> dict: + return mock_safety_check(input["message"]) + + +@evaluator_wf.task() +async def evaluate_content(input: dict, ctx: Context) -> dict: + return mock_evaluate(input["content"]) + + +# > Step 02 Sectioning +@hatchet.durable_task(name="ParallelSectioning", execution_timeout="2m") +async 
def sectioning_task(input: EmptyModel, ctx: DurableContext) -> dict: + content_result, safety_result = await asyncio.gather( + content_wf.aio_run(input={"message": input["message"]}), + safety_wf.aio_run(input={"message": input["message"]}), + ) + + if not safety_result["safe"]: + return {"blocked": True, "reason": safety_result["reason"]} + return {"blocked": False, "content": content_result["content"]} + + +# > Step 03 Voting +@hatchet.durable_task(name="ParallelVoting", execution_timeout="3m") +async def voting_task(input: EmptyModel, ctx: DurableContext) -> dict: + votes = await asyncio.gather( + evaluator_wf.aio_run(input={"content": input["content"]}), + evaluator_wf.aio_run(input={"content": input["content"]}), + evaluator_wf.aio_run(input={"content": input["content"]}), + ) + + approvals = sum(1 for v in votes if v["approved"]) + avg_score = sum(v["score"] for v in votes) / len(votes) + + return {"approved": approvals >= 2, "average_score": avg_score, "votes": len(votes)} + + +def main() -> None: + # > Step 04 Run Worker + worker = hatchet.worker( + "parallelization-worker", + workflows=[content_wf, safety_wf, evaluator_wf, sectioning_task, voting_task], + slots=10, + ) + worker.start() + + +if __name__ == "__main__": + main() diff --git a/examples/python/guides/pyproject.toml b/examples/python/guides/pyproject.toml new file mode 100644 index 0000000000..da43ca3986 --- /dev/null +++ b/examples/python/guides/pyproject.toml @@ -0,0 +1,41 @@ +[tool.poetry] +name = "hatchet-guides-python" +version = "0.0.0" +description = "Hatchet guide examples (Python) - docs snippets with integration deps" +package-mode = false + +[tool.poetry.dependencies] +python = "^3.10" +hatchet-sdk = "^1.28.0" +# LLM integrations +openai = "^1.0.0" +anthropic = "^0.39.0" +groq = "^0.9.0" +ollama = "^0.3.0" +# Embedding integrations +cohere = "^5.0.0" +# OCR integrations +pytesseract = "^0.3.10" +Pillow = "^10.0.0" +reductoai = "^0.16.0" +# Unstructured excluded - heavy deps (onnx) may 
not install on all platforms +# Scraper integrations +playwright = "^1.49.0" +firecrawl-py = "^0.0.16" +browserbase = "^1.4.0" + +[tool.poetry.group.dev.dependencies] +ruff = "^0.15.0" + +[build-system] +requires = ["poetry-core"] +build-backend = "poetry.core.masonry.api" + +[tool.ruff] +target-version = "py310" +exclude = ["integrations/ocr_unstructured.py"] # needs unstructured (heavy deps) +line-length = 100 + +[tool.ruff.lint] +select = ["E", "F", "I", "N", "W"] +ignore = ["E501"] # line length - doc examples can be long diff --git a/examples/python/guides/rag_indexing/embedding_service.py b/examples/python/guides/rag_indexing/embedding_service.py new file mode 100644 index 0000000000..c081d84e9a --- /dev/null +++ b/examples/python/guides/rag_indexing/embedding_service.py @@ -0,0 +1,40 @@ +"""Encapsulated embedding service - swap MockEmbeddingService for OpenAI/Cohere in production. + +See docs: /guides/rag-and-indexing +""" + +from abc import ABC, abstractmethod + + +class EmbeddingService(ABC): + """Interface for text embeddings. 
Implement with OpenAI, Cohere, etc.""" + + @abstractmethod + def embed(self, text: str) -> list[float]: + """Convert text to embedding vector.""" + pass + + +class MockEmbeddingService(EmbeddingService): + """No external API - returns placeholder vectors for demos.""" + + def __init__(self, dim: int = 64) -> None: + self.dim = dim + + def embed(self, text: str) -> list[float]: + return [0.1] * self.dim + + +_embedding_service: EmbeddingService | None = None + + +def get_embedding_service() -> EmbeddingService: + global _embedding_service + if _embedding_service is None: + _embedding_service = MockEmbeddingService() + return _embedding_service + + +def set_embedding_service(service: EmbeddingService) -> None: + global _embedding_service + _embedding_service = service diff --git a/examples/python/guides/rag_indexing/mock_embedding.py b/examples/python/guides/rag_indexing/mock_embedding.py new file mode 100644 index 0000000000..1a22b0f560 --- /dev/null +++ b/examples/python/guides/rag_indexing/mock_embedding.py @@ -0,0 +1,6 @@ +"""Mock embedding client - no external API dependencies.""" + + +def embed(text: str) -> list[float]: + """Mock: return fake embedding vector.""" + return [0.1] * 64 diff --git a/examples/python/guides/rag_indexing/worker.py b/examples/python/guides/rag_indexing/worker.py new file mode 100644 index 0000000000..13e6fe0d61 --- /dev/null +++ b/examples/python/guides/rag_indexing/worker.py @@ -0,0 +1,75 @@ +from typing import Any + +from hatchet_sdk import Context, Hatchet +from pydantic import BaseModel + +try: + from .embedding_service import get_embedding_service +except ImportError: + from embedding_service import get_embedding_service + +hatchet = Hatchet(debug=True) + + +# > Step 01 Define Workflow +class DocInput(BaseModel): + doc_id: str + content: str + + +rag_wf = hatchet.workflow(name="RAGPipeline", input_validator=DocInput) + + +# > Step 02 Define Ingest Task +@rag_wf.task() +async def ingest(input: DocInput, ctx: Context) -> dict[str, 
Any]: + return {"doc_id": input.doc_id, "content": input.content} + + + + +# > Step 03 Chunk Task +def _chunk_content(content: str, chunk_size: int = 100) -> list[str]: + return [content[i : i + chunk_size] for i in range(0, len(content), chunk_size)] + + +# > Step 04 Embed Task +@hatchet.task(name="embed-chunk") +async def embed_chunk(input: dict, ctx: Context) -> dict[str, Any]: + embedder = get_embedding_service() + return {"vector": embedder.embed(input["chunk"])} + + +@rag_wf.durable_task(parents=[ingest]) +async def chunk_and_embed(input: DocInput, ctx: Context) -> dict[str, Any]: + ingested = ctx.task_output(ingest) + chunks = [ingested["content"][i : i + 100] for i in range(0, len(ingested["content"]), 100)] + results = await embed_chunk.aio_run_many( + [embed_chunk.create_bulk_run_item(input={"chunk": c}) for c in chunks] + ) + return {"doc_id": ingested["doc_id"], "vectors": [r["vector"] for r in results]} + + + + +# > Step 05 Query Task +@hatchet.durable_task(name="rag-query") +async def query_task(input: dict, ctx: Context) -> dict[str, Any]: + result = await embed_chunk.aio_run(input={"chunk": input["query"]}) + # Replace with a real vector DB lookup in production + return {"query": input["query"], "vector": result["vector"], "results": []} + + + + +def main() -> None: + # > Step 06 Run Worker + worker = hatchet.worker( + "rag-worker", + workflows=[rag_wf, embed_chunk, query_task], + ) + worker.start() + + +if __name__ == "__main__": + main() diff --git a/examples/python/guides/routing/mock_classifier.py b/examples/python/guides/routing/mock_classifier.py new file mode 100644 index 0000000000..44c894fa6f --- /dev/null +++ b/examples/python/guides/routing/mock_classifier.py @@ -0,0 +1,18 @@ +"""Mock classifier - no external API dependencies.""" + + +def mock_classify(message: str) -> str: + lower = message.lower() + if any(w in lower for w in ("bug", "error", "help")): + return "support" + if any(w in lower for w in ("price", "buy", "plan")): + return 
"sales" + return "other" + + +def mock_reply(message: str, role: str) -> str: + if role == "support": + return f"[Support] I can help with that technical issue. Let me look into: {message}" + if role == "sales": + return f"[Sales] Great question about pricing! Here's what I can tell you about: {message}" + return f"[General] Thanks for reaching out. Regarding: {message}" diff --git a/examples/python/guides/routing/worker.py b/examples/python/guides/routing/worker.py new file mode 100644 index 0000000000..9db38ba056 --- /dev/null +++ b/examples/python/guides/routing/worker.py @@ -0,0 +1,56 @@ +from hatchet_sdk import DurableContext, EmptyModel, Hatchet + +try: + from .mock_classifier import mock_classify, mock_reply +except ImportError: + from mock_classifier import mock_classify, mock_reply + +hatchet = Hatchet(debug=True) + + +# > Step 01 Classify Task +@hatchet.durable_task(name="ClassifyMessage") +async def classify_message(input: EmptyModel, ctx: DurableContext) -> dict: + return {"category": mock_classify(input["message"])} + + +# > Step 02 Specialist Tasks +@hatchet.durable_task(name="HandleSupport") +async def handle_support(input: EmptyModel, ctx: DurableContext) -> dict: + return {"response": mock_reply(input["message"], "support"), "category": "support"} + + +@hatchet.durable_task(name="HandleSales") +async def handle_sales(input: EmptyModel, ctx: DurableContext) -> dict: + return {"response": mock_reply(input["message"], "sales"), "category": "sales"} + + +@hatchet.durable_task(name="HandleDefault") +async def handle_default(input: EmptyModel, ctx: DurableContext) -> dict: + return {"response": mock_reply(input["message"], "other"), "category": "other"} + + +# > Step 03 Router Task +@hatchet.durable_task(name="MessageRouter", execution_timeout="2m") +async def message_router(input: EmptyModel, ctx: DurableContext) -> dict: + classification = await classify_message.aio_run({"message": input["message"]}) + + if classification["category"] == "support": + 
return await handle_support.aio_run({"message": input["message"]}) + if classification["category"] == "sales": + return await handle_sales.aio_run({"message": input["message"]}) + return await handle_default.aio_run({"message": input["message"]}) + + +def main() -> None: + # > Step 04 Run Worker + worker = hatchet.worker( + "routing-worker", + workflows=[classify_message, handle_support, handle_sales, handle_default, message_router], + slots=5, + ) + worker.start() + + +if __name__ == "__main__": + main() diff --git a/examples/python/guides/scheduled_jobs/trigger.py b/examples/python/guides/scheduled_jobs/trigger.py new file mode 100644 index 0000000000..42f1f27a28 --- /dev/null +++ b/examples/python/guides/scheduled_jobs/trigger.py @@ -0,0 +1,15 @@ +from datetime import datetime, timedelta, timezone + +from hatchet_sdk import Hatchet + +hatchet = Hatchet(debug=True) + + +# > Step 02 Schedule One Time +# Schedule a one-time run at a specific time. +run_at = datetime.now(tz=timezone.utc) + timedelta(hours=1) +hatchet.scheduled.create( + workflow_name="ScheduledWorkflow", + trigger_at=run_at, + input={}, +) diff --git a/examples/python/guides/scheduled_jobs/worker.py b/examples/python/guides/scheduled_jobs/worker.py new file mode 100644 index 0000000000..6574a6e9b1 --- /dev/null +++ b/examples/python/guides/scheduled_jobs/worker.py @@ -0,0 +1,28 @@ +from hatchet_sdk import Context, EmptyModel, Hatchet + +hatchet = Hatchet(debug=True) + + +# > Step 01 Define Cron Task +cron_wf = hatchet.workflow(name="ScheduledWorkflow", on_crons=["0 * * * *"]) + + +@cron_wf.task() +def run_scheduled_job(input: EmptyModel, ctx: Context) -> dict: + """Runs every hour (minute 0).""" + return {"status": "completed", "job": "maintenance"} + + + + +def main() -> None: + # > Step 03 Run Worker + worker = hatchet.worker( + "scheduled-worker", + workflows=[cron_wf], + ) + worker.start() + + +if __name__ == "__main__": + main() diff --git a/examples/python/guides/streaming/client.py 
b/examples/python/guides/streaming/client.py new file mode 100644 index 0000000000..8fa63c3d16 --- /dev/null +++ b/examples/python/guides/streaming/client.py @@ -0,0 +1,13 @@ +from hatchet_sdk import Hatchet + +hatchet = Hatchet(debug=True) + + +# > Step 03 Subscribe Client +# Client triggers the task and subscribes to the stream. +async def run_and_subscribe(): + run = await hatchet.runs.create(workflow_name="stream_task", input={}) + async for chunk in hatchet.runs.subscribe_to_stream(run.run_id): + print(chunk) + + diff --git a/examples/python/guides/streaming/worker.py b/examples/python/guides/streaming/worker.py new file mode 100644 index 0000000000..653d8e2d48 --- /dev/null +++ b/examples/python/guides/streaming/worker.py @@ -0,0 +1,49 @@ +import asyncio + +from hatchet_sdk import ( + ConcurrencyExpression, + ConcurrencyLimitStrategy, + Context, + EmptyModel, + Hatchet, +) + +hatchet = Hatchet(debug=True) + + +# > Step 01 Define Streaming Task +@hatchet.task( + concurrency=ConcurrencyExpression( + expression="'constant'", + max_runs=1, + limit_strategy=ConcurrencyLimitStrategy.CANCEL_IN_PROGRESS, + ) +) +async def stream_task(input: EmptyModel, ctx: Context) -> dict: + """Emit chunks to subscribers in real-time.""" + for i in range(5): + await ctx.aio_put_stream(f"chunk-{i}") + await asyncio.sleep(0.5) + return {"status": "done"} + + + + +# > Step 02 Emit Chunks +async def _emit_chunks(ctx: Context) -> None: + for i in range(5): + await ctx.aio_put_stream(f"chunk-{i}") + await asyncio.sleep(0.5) + + +def main() -> None: + # > Step 04 Run Worker + worker = hatchet.worker( + "streaming-worker", + workflows=[stream_task], + ) + worker.start() + + +if __name__ == "__main__": + main() diff --git a/examples/python/guides/web_scraping/mock_scraper.py b/examples/python/guides/web_scraping/mock_scraper.py new file mode 100644 index 0000000000..0ff89a3b9d --- /dev/null +++ b/examples/python/guides/web_scraping/mock_scraper.py @@ -0,0 +1,19 @@ +"""Mock scraper - no 
external API dependencies.""" + +from datetime import datetime, timezone + + +def mock_scrape(url: str) -> dict: + return { + "url": url, + "title": f"Page: {url}", + "content": f"Mock scraped content from {url}. In production, use Firecrawl, Browserbase, or Playwright here.", + "scraped_at": datetime.now(timezone.utc).isoformat(), + } + + +def mock_extract(content: str) -> dict: + return { + "summary": content[:80], + "word_count": str(len(content.split())), + } diff --git a/examples/python/guides/web_scraping/worker.py b/examples/python/guides/web_scraping/worker.py new file mode 100644 index 0000000000..ce2be65a68 --- /dev/null +++ b/examples/python/guides/web_scraping/worker.py @@ -0,0 +1,81 @@ +import re + +from hatchet_sdk import Context, EmptyModel, Hatchet +from hatchet_sdk.rate_limit import RateLimit, RateLimitDuration + +try: + from .mock_scraper import mock_scrape +except ImportError: + from mock_scraper import mock_scrape + +hatchet = Hatchet(debug=True) + +scrape_wf = hatchet.workflow(name="ScrapeUrl") +process_wf = hatchet.workflow(name="ProcessContent") + + +# > Step 01 Define Scrape Task +@scrape_wf.task(execution_timeout="2m", retries=2) +async def scrape_url(input: dict, ctx: Context) -> dict: + return mock_scrape(input["url"]) + + +# > Step 02 Process Content +@process_wf.task() +async def process_content(input: dict, ctx: Context) -> dict: + content = input["content"] + links = re.findall(r"https?://[^\s<>\"']+", content) + summary = content[:200].strip() + word_count = len(content.split()) + return {"summary": summary, "word_count": word_count, "links": links} + + +# > Step 03 Cron Workflow +cron_wf = hatchet.workflow(name="WebScrapeWorkflow", on_crons=["0 */6 * * *"]) + + +@cron_wf.task() +async def scheduled_scrape(input: EmptyModel, ctx: Context) -> dict: + urls = [ + "https://example.com/pricing", + "https://example.com/blog", + "https://example.com/docs", + ] + + results = [] + for url in urls: + scraped = await 
scrape_wf.aio_run(input={"url": url}) + processed = await process_wf.aio_run(input={"url": url, "content": scraped["content"]}) + results.append({"url": url, **processed}) + return {"refreshed": len(results), "results": results} + + +# > Step 04 Rate Limited Scrape +SCRAPE_RATE_LIMIT_KEY = "scrape-rate-limit" + +rate_limited_wf = hatchet.workflow(name="RateLimitedScrape") + + +@rate_limited_wf.task( + execution_timeout="2m", + retries=2, + rate_limits=[RateLimit(static_key=SCRAPE_RATE_LIMIT_KEY, units=1)], +) +async def rate_limited_scrape(input: dict, ctx: Context) -> dict: + return mock_scrape(input["url"]) + + +def main() -> None: + # > Step 05 Run Worker + hatchet.rate_limits.put(SCRAPE_RATE_LIMIT_KEY, 10, RateLimitDuration.MINUTE) + + worker = hatchet.worker( + "web-scraping-worker", + workflows=[scrape_wf, process_wf, cron_wf, rate_limited_wf], + slots=5, + ) + worker.start() + + +if __name__ == "__main__": + main() diff --git a/examples/python/guides/webhook_processing/worker.py b/examples/python/guides/webhook_processing/worker.py new file mode 100644 index 0000000000..e49ecb47a2 --- /dev/null +++ b/examples/python/guides/webhook_processing/worker.py @@ -0,0 +1,49 @@ +from hatchet_sdk import Context, Hatchet +from pydantic import BaseModel + +hatchet = Hatchet(debug=True) + + +# > Step 01 Define Webhook Task +class WebhookPayload(BaseModel): + event_id: str + type: str + data: dict + + +@hatchet.task( + input_validator=WebhookPayload, + on_events=["webhook:stripe", "webhook:github"], +) +def process_webhook(input: WebhookPayload, ctx: Context) -> dict: + """Process webhook payload. 
Hatchet acknowledges immediately, processes async.""" + return {"processed": input.event_id, "type": input.type} + + + + +# > Step 02 Register Webhook +def forward_webhook_to_hatchet(event_key: str, payload: dict) -> None: + """Call this from your webhook endpoint to trigger the task.""" + hatchet.event.push(event_key, payload) +# forward_webhook_to_hatchet("webhook:stripe", {"event_id": "evt_123", "type": "payment", "data": {...}}) + + +# > Step 03 Process Payload +def _validate_and_process(input: WebhookPayload) -> dict: + if not input.event_id: + raise ValueError("event_id required for deduplication") + return {"processed": input.event_id, "type": input.type} + + +def main() -> None: + # > Step 04 Run Worker + worker = hatchet.worker( + "webhook-worker", + workflows=[process_webhook], + ) + worker.start() + + +if __name__ == "__main__": + main() diff --git a/examples/ruby/guides/Gemfile b/examples/ruby/guides/Gemfile new file mode 100644 index 0000000000..a016d54cca --- /dev/null +++ b/examples/ruby/guides/Gemfile @@ -0,0 +1,14 @@ +# frozen_string_literal: true + +# Hatchet guide examples (Ruby) - docs snippets with integration deps +source 'https://rubygems.org' + +gem 'hatchet-sdk', path: '../../ruby/src' +# LLM / embedding integrations +gem 'openai' +# OCR integration (requires Tesseract binary) +gem 'rtesseract' + +group :development do + gem 'rubocop', '~> 1.21' +end diff --git a/examples/ruby/guides/ai_agents/mock_agent.rb b/examples/ruby/guides/ai_agents/mock_agent.rb new file mode 100644 index 0000000000..75a24b39c5 --- /dev/null +++ b/examples/ruby/guides/ai_agents/mock_agent.rb @@ -0,0 +1,23 @@ +# frozen_string_literal: true + +# Mock LLM and tools - no external API dependencies +@llm_call_count = 0 + +def call_llm(_messages) + @llm_call_count += 1 + if @llm_call_count == 1 + { 'content' => '', 'tool_calls' => [{ 'name' => 'get_weather', 'args' => { 'location' => 'SF' } }], + 'done' => false } + else + { 'content' => "It's 72°F and sunny in SF.", 
'tool_calls' => [], 'done' => true } + end +end + +def run_tool(name, args) + if name == 'get_weather' + loc = args['location'] || 'unknown' + "Weather in #{loc}: 72°F, sunny" + else + "Unknown tool: #{name}" + end +end diff --git a/examples/ruby/guides/ai_agents/worker.rb b/examples/ruby/guides/ai_agents/worker.rb new file mode 100644 index 0000000000..e385d0bce9 --- /dev/null +++ b/examples/ruby/guides/ai_agents/worker.rb @@ -0,0 +1,42 @@ +# frozen_string_literal: true + +require 'hatchet-sdk' +require_relative 'mock_agent' + +HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET) + +# > Step 02 Reasoning Loop +def agent_reasoning_loop(query) + messages = [{ 'role' => 'user', 'content' => query }] + 10.times do + resp = call_llm(messages) + return { 'response' => resp['content'] } if resp['done'] + + (resp['tool_calls'] || []).each do |tc| + result = run_tool(tc['name'], tc['args'] || {}) + messages << { 'role' => 'tool', 'content' => result } + end + end + { 'response' => 'Max iterations reached' } +end + +# > Step 01 Define Agent Task +AGENT_TASK = HATCHET.durable_task(name: 'ReasoningLoopAgent') do |input, _ctx| + query = input.is_a?(Hash) && input['query'] ? 
input['query'].to_s : 'Hello' + agent_reasoning_loop(query) +end + +# > Step 03 Stream Response +STREAMING_AGENT = HATCHET.durable_task(name: 'StreamingAgentTask') do |_input, ctx| + ['Hello', ' ', 'world', '!'].each { |t| ctx.put_stream(t) } + { 'done' => true } +end + + +def main + # > Step 04 Run Worker + worker = HATCHET.worker('agent-worker', slots: 5, workflows: [AGENT_TASK, STREAMING_AGENT]) + worker.start +end + +main if __FILE__ == $PROGRAM_NAME diff --git a/examples/ruby/guides/batch_processing/worker.rb b/examples/ruby/guides/batch_processing/worker.rb new file mode 100644 index 0000000000..009a187d4d --- /dev/null +++ b/examples/ruby/guides/batch_processing/worker.rb @@ -0,0 +1,32 @@ +# frozen_string_literal: true + +require 'hatchet-sdk' + +HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET) + +# > Step 01 Define Parent Task +BATCH_PARENT_WF = HATCHET.workflow(name: 'BatchParent') +BATCH_CHILD_WF = HATCHET.workflow(name: 'BatchChild') + +BATCH_PARENT_WF.durable_task(:spawn_children) do |input, _ctx| + items = input['items'] || [] + results = BATCH_CHILD_WF.run_many( + items.map { |item_id| BATCH_CHILD_WF.create_bulk_run_item(input: { 'item_id' => item_id }) } + ) + { 'processed' => results.size, 'results' => results } +end + + +# > Step 03 Process Item +BATCH_CHILD_WF.task(:process_item) do |input, _ctx| + { 'status' => 'done', 'item_id' => input['item_id'] } +end + + +def main + # > Step 04 Run Worker + worker = HATCHET.worker('batch-worker', slots: 20, workflows: [BATCH_PARENT_WF, BATCH_CHILD_WF]) + worker.start +end + +main if __FILE__ == $PROGRAM_NAME diff --git a/examples/ruby/guides/document_processing/mock_ocr.rb b/examples/ruby/guides/document_processing/mock_ocr.rb new file mode 100644 index 0000000000..3416cfc3b0 --- /dev/null +++ b/examples/ruby/guides/document_processing/mock_ocr.rb @@ -0,0 +1,6 @@ +# frozen_string_literal: true + +# Mock OCR - no external dependencies +def parse_document(content) + "Parsed text from #{content.size} 
bytes" +end diff --git a/examples/ruby/guides/document_processing/worker.rb b/examples/ruby/guides/document_processing/worker.rb new file mode 100644 index 0000000000..6a0a83fc69 --- /dev/null +++ b/examples/ruby/guides/document_processing/worker.rb @@ -0,0 +1,37 @@ +# frozen_string_literal: true + +require 'hatchet-sdk' +require_relative 'mock_ocr' + +HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET) + +# > Step 01 Define DAG +DOC_WF = HATCHET.workflow(name: 'DocumentPipeline') + +INGEST = DOC_WF.task(:ingest) do |input, _ctx| + { 'doc_id' => input['doc_id'], 'content' => input['content'] } +end + + +# > Step 02 Parse Stage +PARSE = DOC_WF.task(:parse, parents: [INGEST]) do |input, ctx| + ingested = ctx.task_output(INGEST) + text = parse_document(ingested['content']) + { 'doc_id' => input['doc_id'], 'text' => text } +end + + +# > Step 03 Extract Stage +DOC_WF.task(:extract, parents: [PARSE]) do |_input, ctx| + parsed = ctx.task_output(PARSE) + { 'doc_id' => parsed['doc_id'], 'entities' => %w[entity1 entity2] } +end + + +def main + # > Step 04 Run Worker + worker = HATCHET.worker('document-worker', workflows: [DOC_WF]) + worker.start +end + +main if __FILE__ == $PROGRAM_NAME diff --git a/examples/ruby/guides/evaluator_optimizer/mock_llm.rb b/examples/ruby/guides/evaluator_optimizer/mock_llm.rb new file mode 100644 index 0000000000..3c3c37781f --- /dev/null +++ b/examples/ruby/guides/evaluator_optimizer/mock_llm.rb @@ -0,0 +1,20 @@ +# frozen_string_literal: true + +@generate_count = 0 + +def mock_generate(_prompt) + @generate_count += 1 + if @generate_count == 1 + 'Check out our product! Buy now!' + else + 'Discover how our tool saves teams 10 hours/week. Try it free.' + end +end + +def mock_evaluate(draft) + if draft.length < 40 + { 'score' => 0.4, 'feedback' => 'Too short and pushy. Add a specific benefit and soften the CTA.' } + else + { 'score' => 0.9, 'feedback' => 'Clear value prop, appropriate tone.' 
} + end +end diff --git a/examples/ruby/guides/evaluator_optimizer/worker.rb b/examples/ruby/guides/evaluator_optimizer/worker.rb new file mode 100644 index 0000000000..8759bbcf27 --- /dev/null +++ b/examples/ruby/guides/evaluator_optimizer/worker.rb @@ -0,0 +1,64 @@ +# frozen_string_literal: true + +require 'hatchet-sdk' +require_relative 'mock_llm' + +HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET) + +GENERATOR_WF = HATCHET.workflow(name: 'GenerateDraft') +EVALUATOR_WF = HATCHET.workflow(name: 'EvaluateDraft') + +# > Step 01 Define Tasks +GENERATOR_WF.task(:generate_draft) do |input, _ctx| + prompt = if input['feedback'] + "Improve this draft.\n\nDraft: #{input['previous_draft']}\nFeedback: #{input['feedback']}" + else + "Write a social media post about \"#{input['topic']}\" for #{input['audience']}. Under 100 words." + end + { 'draft' => mock_generate(prompt) } +end + +EVALUATOR_WF.task(:evaluate_draft) do |input, _ctx| + mock_evaluate(input['draft']) +end + +# > Step 02 Optimization Loop
OPTIMIZER_TASK = HATCHET.durable_task(name: 'EvaluatorOptimizer', execution_timeout: '5m') do |input, _ctx| + max_iterations = 3 + threshold = 0.8 + draft = '' + feedback = '' + result = nil + + max_iterations.times do |i| + generated = GENERATOR_WF.run( + 'topic' => input['topic'], 'audience' => input['audience'], + 'previous_draft' => draft.empty? ? nil : draft, + 'feedback' => feedback.empty? ? 
nil : feedback + ) + draft = generated['draft'] + + evaluation = EVALUATOR_WF.run( + 'draft' => draft, 'topic' => input['topic'], 'audience' => input['audience'] + ) + + # 'next' inside Integer#times would discard the value and keep looping; capture and break instead. + if evaluation['score'] >= threshold + result = { 'draft' => draft, 'iterations' => i + 1, 'score' => evaluation['score'] } + break + end + + feedback = evaluation['feedback'] + end + + result || { 'draft' => draft, 'iterations' => max_iterations, 'score' => -1 } +end + +def main + # > Step 03 Run Worker + worker = HATCHET.worker('evaluator-optimizer-worker', slots: 5, + workflows: [GENERATOR_WF, EVALUATOR_WF, OPTIMIZER_TASK]) + worker.start +end + +main if __FILE__ == $PROGRAM_NAME diff --git a/examples/ruby/guides/event_driven/trigger.rb b/examples/ruby/guides/event_driven/trigger.rb new file mode 100644 index 0000000000..99f505da25 --- /dev/null +++ b/examples/ruby/guides/event_driven/trigger.rb @@ -0,0 +1,9 @@ +# frozen_string_literal: true + +require 'hatchet-sdk' + +HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET) + +# > Step 03 Push Event +# Push an event to trigger the workflow. Use the same key as on_events. +HATCHET.event.push('order:created', 'message' => 'Order #1234', 'source' => 'webhook') diff --git a/examples/ruby/guides/event_driven/worker.rb b/examples/ruby/guides/event_driven/worker.rb new file mode 100644 index 0000000000..076054225d --- /dev/null +++ b/examples/ruby/guides/event_driven/worker.rb @@ -0,0 +1,27 @@ +# frozen_string_literal: true + +require 'hatchet-sdk' + +HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET) + +# > Step 01 Define Event Task +EVENT_WF = HATCHET.workflow(name: 'EventDrivenWorkflow', on_events: ['order:created', 'user:signup']) + +EVENT_WF.task(:process_event) do |input, _ctx| + { 'processed' => input['message'], 'source' => input['source'] || 'api' } +end + + +# > Step 02 Register Event Trigger +# Push an event from your app to trigger the workflow. Use the same key as on_events. 
+def push_order_event + HATCHET.event.push('order:created', 'message' => 'Order #1234', 'source' => 'webhook') +end + +def main + # > Step 04 Run Worker + worker = HATCHET.worker('event-driven-worker', workflows: [EVENT_WF]) + worker.start +end + +main if __FILE__ == $PROGRAM_NAME diff --git a/examples/ruby/guides/human_in_the_loop/trigger.rb b/examples/ruby/guides/human_in_the_loop/trigger.rb new file mode 100644 index 0000000000..a6403003f1 --- /dev/null +++ b/examples/ruby/guides/human_in_the_loop/trigger.rb @@ -0,0 +1,17 @@ +# frozen_string_literal: true + +require 'hatchet-sdk' + +HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET) + +# > Step 03 Push Approval Event +# Include the run_id so the event matches the specific task waiting for it. +def push_approval(run_id:, approved:, reason: '') + HATCHET.events.create( + key: 'approval:decision', + data: { 'runId' => run_id, 'approved' => approved, 'reason' => reason } + ) +end + +# Approve: push_approval(run_id: 'run-id-from-ui', approved: true) +# Reject: push_approval(run_id: 'run-id-from-ui', approved: false, reason: "needs review") diff --git a/examples/ruby/guides/human_in_the_loop/worker.rb b/examples/ruby/guides/human_in_the_loop/worker.rb new file mode 100644 index 0000000000..6085b926bf --- /dev/null +++ b/examples/ruby/guides/human_in_the_loop/worker.rb @@ -0,0 +1,41 @@ +# frozen_string_literal: true + +require 'hatchet-sdk' + +HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET) + +APPROVAL_EVENT_KEY = 'approval:decision' + +# > Step 02 Wait For Event +def wait_for_approval(ctx) + run_id = ctx.workflow_run_id + ctx.wait_for( + 'approval', + Hatchet::UserEventCondition.new( + event_key: APPROVAL_EVENT_KEY, + expression: "input.runId == '#{run_id}'" + ) + ) +end + +# > Step 01 Define Approval Task +APPROVAL_TASK = HATCHET.durable_task(name: 'ApprovalTask') do |_input, ctx| + proposed_action = { 'action' => 'send_email', 'to' => 'user@example.com' } + approval = 
wait_for_approval(ctx) + if approval['approved'] + { 'status' => 'approved', 'action' => proposed_action } + else + { 'status' => 'rejected', 'reason' => approval['reason'].to_s } + end +end + +def main + # > Step 04 Run Worker + worker = HATCHET.worker( + 'human-in-the-loop-worker', + workflows: [APPROVAL_TASK] + ) + worker.start +end + +main if __FILE__ == $PROGRAM_NAME diff --git a/examples/ruby/guides/integrations/embedding_openai.rb b/examples/ruby/guides/integrations/embedding_openai.rb new file mode 100644 index 0000000000..54e5e27678 --- /dev/null +++ b/examples/ruby/guides/integrations/embedding_openai.rb @@ -0,0 +1,14 @@ +# frozen_string_literal: true + +# Third-party integration - requires: bundle add openai +# See: /guides/rag-and-indexing + +require 'openai' + +CLIENT = OpenAI::Client.new + +# > OpenAI embedding usage +def embed(text) + response = CLIENT.embeddings(parameters: { model: 'text-embedding-3-small', input: text }) + response.dig('data', 0, 'embedding') || [] +end diff --git a/examples/ruby/guides/integrations/llm_openai.rb b/examples/ruby/guides/integrations/llm_openai.rb new file mode 100644 index 0000000000..71ec7699bc --- /dev/null +++ b/examples/ruby/guides/integrations/llm_openai.rb @@ -0,0 +1,33 @@ +# frozen_string_literal: true + +# Third-party integration - requires: bundle add openai +# See: /guides/ai-agents + +require 'openai' +require 'json' + +CLIENT = OpenAI::Client.new + +# > OpenAI usage +def complete(messages) + response = CLIENT.chat( + parameters: { + model: 'gpt-4o-mini', + messages: messages, + tool_choice: 'auto', + tools: [{ + type: 'function', + function: { + name: 'get_weather', + description: 'Get weather for a location', + parameters: { type: 'object', properties: { location: { type: 'string' } }, required: ['location'] } + } + }] + } + ) + msg = response.dig('choices', 0, 'message') + tool_calls = msg['tool_calls']&.map do |tc| + { 'name' => tc.dig('function', 'name'), 'args' => JSON.parse(tc.dig('function', 'arguments') || '{}') 
} + end || [] + { 'content' => msg['content'] || '', 'tool_calls' => tool_calls, 'done' => tool_calls.empty? } +end diff --git a/examples/ruby/guides/integrations/ocr_tesseract.rb b/examples/ruby/guides/integrations/ocr_tesseract.rb new file mode 100644 index 0000000000..c25e92640a --- /dev/null +++ b/examples/ruby/guides/integrations/ocr_tesseract.rb @@ -0,0 +1,11 @@ +# frozen_string_literal: true + +# Third-party integration - requires: bundle add rtesseract; install Tesseract binary +# See: /guides/document-processing + +require 'rtesseract' + +# > Tesseract usage +def parse_document(content) + RTesseract.new(nil, data: content).to_s +end diff --git a/examples/ruby/guides/llm_pipelines/mock_llm.rb b/examples/ruby/guides/llm_pipelines/mock_llm.rb new file mode 100644 index 0000000000..d9b7e364a8 --- /dev/null +++ b/examples/ruby/guides/llm_pipelines/mock_llm.rb @@ -0,0 +1,6 @@ +# frozen_string_literal: true + +# Mock LLM - no external API dependencies +def generate(prompt) + { 'content' => "Generated for: #{prompt[0, 50]}...", 'valid' => true } +end diff --git a/examples/ruby/guides/llm_pipelines/worker.rb b/examples/ruby/guides/llm_pipelines/worker.rb new file mode 100644 index 0000000000..4887005411 --- /dev/null +++ b/examples/ruby/guides/llm_pipelines/worker.rb @@ -0,0 +1,38 @@ +# frozen_string_literal: true + +require 'hatchet-sdk' +require_relative 'mock_llm' + +HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET) + +# > Step 01 Define Pipeline +LLM_WF = HATCHET.workflow(name: 'LLMPipeline') + +PROMPT_TASK = LLM_WF.task(:prompt_task) do |input, _ctx| + { 'prompt' => input['prompt'] } +end + + +# > Step 02 Prompt Task +def build_prompt(user_input, context = '') + base = "Process the following: #{user_input}" + context.empty? ? 
base : "#{base}\nContext: #{context}" +end + +# > Step 03 Validate Task +LLM_WF.task(:generate_task, parents: [PROMPT_TASK]) do |_input, ctx| + prev = ctx.task_output(PROMPT_TASK) + output = generate(prev['prompt']) + raise 'Validation failed' unless output['valid'] + + output +end + + +def main + # > Step 04 Run Worker + worker = HATCHET.worker('llm-pipeline-worker', workflows: [LLM_WF]) + worker.start +end + +main if __FILE__ == $PROGRAM_NAME diff --git a/examples/ruby/guides/multi_agent/mock_llm.rb b/examples/ruby/guides/multi_agent/mock_llm.rb new file mode 100644 index 0000000000..6d60c08d85 --- /dev/null +++ b/examples/ruby/guides/multi_agent/mock_llm.rb @@ -0,0 +1,21 @@ +# frozen_string_literal: true + +@orchestrator_call_count = 0 + +def mock_orchestrator_llm(_messages) + @orchestrator_call_count += 1 + case @orchestrator_call_count + when 1 + { 'done' => false, 'content' => '', + 'tool_call' => { 'name' => 'research', 'args' => { 'task' => 'Find key facts about the topic' } } } + when 2 + { 'done' => false, 'content' => '', + 'tool_call' => { 'name' => 'writing', 'args' => { 'task' => 'Write a summary from the research' } } } + else + { 'done' => true, 'content' => 'Here is the final report combining research and writing.' 
} + end +end + +def mock_specialist_llm(task, role) + "[#{role}] Completed: #{task}" +end diff --git a/examples/ruby/guides/multi_agent/worker.rb b/examples/ruby/guides/multi_agent/worker.rb new file mode 100644 index 0000000000..9f5cd62f28 --- /dev/null +++ b/examples/ruby/guides/multi_agent/worker.rb @@ -0,0 +1,61 @@ +# frozen_string_literal: true + +require 'hatchet-sdk' +require_relative 'mock_llm' + +HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET) + +# > Step 01 Specialist Agents +RESEARCH_TASK = HATCHET.durable_task(name: 'ResearchSpecialist', execution_timeout: '3m') do |input, _ctx| + { 'result' => mock_specialist_llm(input['task'], 'research') } +end + +WRITING_TASK = HATCHET.durable_task(name: 'WritingSpecialist', execution_timeout: '2m') do |input, _ctx| + { 'result' => mock_specialist_llm(input['task'], 'writing') } +end + +CODE_TASK = HATCHET.durable_task(name: 'CodeSpecialist', execution_timeout: '2m') do |input, _ctx| + { 'result' => mock_specialist_llm(input['task'], 'code') } +end + +SPECIALISTS = { + 'research' => RESEARCH_TASK, + 'writing' => WRITING_TASK, + 'code' => CODE_TASK +}.freeze + +# > Step 02 Orchestrator Loop +ORCHESTRATOR = HATCHET.durable_task(name: 'MultiAgentOrchestrator', execution_timeout: '15m') do |input, _ctx| + messages = [{ 'role' => 'user', 'content' => input['goal'] }] + + result = nil + 10.times do + response = mock_orchestrator_llm(messages) + + if response['done'] + result = { 'result' => response['content'] } + break + end + + specialist = SPECIALISTS[response['tool_call']['name']] + raise "Unknown specialist: #{response['tool_call']['name']}" unless specialist + + specialist_result = specialist.run( + 'task' => response['tool_call']['args']['task'], + 'context' => messages.map { |m| m['content'] }.join("\n") + ) + + messages << { 'role' => 'assistant', 'content' => "Called #{response['tool_call']['name']}" } + messages << { 'role' => 'tool', 'content' => specialist_result['result'] } + end + + 
result || { 'result' => 'Max iterations reached' } +end + +def main + # > Step 03 Run Worker + worker = HATCHET.worker('multi-agent-worker', slots: 10, workflows: [RESEARCH_TASK, WRITING_TASK, CODE_TASK, ORCHESTRATOR]) + worker.start +end + +main if __FILE__ == $PROGRAM_NAME diff --git a/examples/ruby/guides/parallelization/mock_llm.rb b/examples/ruby/guides/parallelization/mock_llm.rb new file mode 100644 index 0000000000..58b8b71301 --- /dev/null +++ b/examples/ruby/guides/parallelization/mock_llm.rb @@ -0,0 +1,18 @@ +# frozen_string_literal: true + +def mock_generate_content(message) + "Here is a helpful response to: #{message}" +end + +def mock_safety_check(message) + if message.downcase.include?('unsafe') + { 'safe' => false, 'reason' => 'Content flagged as potentially unsafe.' } + else + { 'safe' => true, 'reason' => 'Content is appropriate.' } + end +end + +def mock_evaluate_content(content) + score = content.length > 20 ? 0.85 : 0.3 + { 'score' => score, 'approved' => score >= 0.7 } +end diff --git a/examples/ruby/guides/parallelization/worker.rb b/examples/ruby/guides/parallelization/worker.rb new file mode 100644 index 0000000000..4a4504d8d9 --- /dev/null +++ b/examples/ruby/guides/parallelization/worker.rb @@ -0,0 +1,57 @@ +# frozen_string_literal: true + +require 'hatchet-sdk' +require_relative 'mock_llm' + +HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET) + +CONTENT_WF = HATCHET.workflow(name: 'GenerateContent') +SAFETY_WF = HATCHET.workflow(name: 'SafetyCheck') +EVALUATOR_WF = HATCHET.workflow(name: 'EvaluateContent') + +# > Step 01 Parallel Tasks +CONTENT_WF.task(:generate_content) do |input, _ctx| + { 'content' => mock_generate_content(input['message']) } +end + +SAFETY_WF.task(:safety_check) do |input, _ctx| + mock_safety_check(input['message']) +end + +EVALUATOR_WF.task(:evaluate_content) do |input, _ctx| + mock_evaluate_content(input['content']) +end + +# > Step 02 Sectioning +SECTIONING_TASK = HATCHET.durable_task(name: 
'ParallelSectioning', execution_timeout: '2m') do |input, _ctx| + threads = [] + threads << Thread.new { CONTENT_WF.run('message' => input['message']) } + threads << Thread.new { SAFETY_WF.run('message' => input['message']) } + content_result, safety_result = threads.map(&:value) + + if safety_result['safe'] + { 'blocked' => false, 'content' => content_result['content'] } + else + { 'blocked' => true, 'reason' => safety_result['reason'] } + end +end + +# > Step 03 Voting +VOTING_TASK = HATCHET.durable_task(name: 'ParallelVoting', execution_timeout: '3m') do |input, _ctx| + threads = 3.times.map { Thread.new { EVALUATOR_WF.run('content' => input['content']) } } + votes = threads.map(&:value) + + approvals = votes.count { |v| v['approved'] } + avg_score = votes.sum { |v| v['score'] } / votes.size.to_f + + { 'approved' => approvals >= 2, 'average_score' => avg_score, 'votes' => votes.size } +end + +def main + # > Step 04 Run Worker + worker = HATCHET.worker('parallelization-worker', slots: 10, + workflows: [CONTENT_WF, SAFETY_WF, EVALUATOR_WF, SECTIONING_TASK, VOTING_TASK]) + worker.start +end + +main if __FILE__ == $PROGRAM_NAME diff --git a/examples/ruby/guides/rag_indexing/mock_embedding.rb b/examples/ruby/guides/rag_indexing/mock_embedding.rb new file mode 100644 index 0000000000..aa8cad26f0 --- /dev/null +++ b/examples/ruby/guides/rag_indexing/mock_embedding.rb @@ -0,0 +1,6 @@ +# frozen_string_literal: true + +# Mock embedding - no external API dependencies +def embed(_text) + [0.1] * 64 +end diff --git a/examples/ruby/guides/rag_indexing/worker.rb b/examples/ruby/guides/rag_indexing/worker.rb new file mode 100644 index 0000000000..cdca5e5724 --- /dev/null +++ b/examples/ruby/guides/rag_indexing/worker.rb @@ -0,0 +1,51 @@ +# frozen_string_literal: true + +require 'hatchet-sdk' +require_relative 'mock_embedding' + +HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET) + +# > Step 01 Define Workflow +RAG_WF = HATCHET.workflow(name: 'RAGPipeline') + +# 
> Step 02 Define Ingest Task +INGEST = RAG_WF.task(:ingest) do |input, _ctx| + { 'doc_id' => input['doc_id'], 'content' => input['content'] } +end + + +# > Step 03 Chunk Task +def chunk_content(content, chunk_size = 100) + content.scan(/.{1,#{chunk_size}}/) +end + +# > Step 04 Embed Task +EMBED_CHUNK_TASK = HATCHET.task(name: 'embed-chunk') do |input, _ctx| + { 'vector' => embed(input['chunk']) } +end + +RAG_WF.durable_task(:chunk_and_embed, parents: [INGEST]) do |_input, ctx| + ingested = ctx.task_output(INGEST) + content = ingested['content'] + chunks = content.scan(/.{1,100}/) + results = EMBED_CHUNK_TASK.run_many( + chunks.map { |c| EMBED_CHUNK_TASK.create_bulk_run_item(input: { 'chunk' => c }) } + ) + { 'doc_id' => ingested['doc_id'], 'vectors' => results.map { |r| r['vector'] } } +end + + +# > Step 05 Query Task +QUERY_TASK = HATCHET.durable_task(name: 'rag-query') do |input, _ctx| + result = EMBED_CHUNK_TASK.run('chunk' => input['query']) + # Replace with a real vector DB lookup in production + { 'query' => input['query'], 'vector' => result['vector'], 'results' => [] } +end + +def main + # > Step 06 Run Worker + worker = HATCHET.worker('rag-worker', workflows: [RAG_WF, EMBED_CHUNK_TASK, QUERY_TASK]) + worker.start +end + +main if __FILE__ == $PROGRAM_NAME diff --git a/examples/ruby/guides/routing/mock_classifier.rb b/examples/ruby/guides/routing/mock_classifier.rb new file mode 100644 index 0000000000..b0979ce58a --- /dev/null +++ b/examples/ruby/guides/routing/mock_classifier.rb @@ -0,0 +1,20 @@ +# frozen_string_literal: true + +def mock_classify(message) + lower = message.downcase + return 'support' if %w[bug error help].any? { |w| lower.include?(w) } + return 'sales' if %w[price buy plan].any? { |w| lower.include?(w) } + + 'other' +end + +def mock_reply(message, role) + case role + when 'support' + "[Support] I can help with that technical issue. Let me look into: #{message}" + when 'sales' + "[Sales] Great question about pricing! 
Here's what I can tell you about: #{message}" + else + "[General] Thanks for reaching out. Regarding: #{message}" + end +end diff --git a/examples/ruby/guides/routing/worker.rb b/examples/ruby/guides/routing/worker.rb new file mode 100644 index 0000000000..29135d62f7 --- /dev/null +++ b/examples/ruby/guides/routing/worker.rb @@ -0,0 +1,47 @@ +# frozen_string_literal: true + +require 'hatchet-sdk' +require_relative 'mock_classifier' + +HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET) + +# > Step 01 Classify Task +CLASSIFY_TASK = HATCHET.durable_task(name: 'ClassifyMessage') do |input, _ctx| + { 'category' => mock_classify(input['message']) } +end + +# > Step 02 Specialist Tasks +SUPPORT_TASK = HATCHET.durable_task(name: 'HandleSupport') do |input, _ctx| + { 'response' => mock_reply(input['message'], 'support'), 'category' => 'support' } +end + +SALES_TASK = HATCHET.durable_task(name: 'HandleSales') do |input, _ctx| + { 'response' => mock_reply(input['message'], 'sales'), 'category' => 'sales' } +end + +DEFAULT_TASK = HATCHET.durable_task(name: 'HandleDefault') do |input, _ctx| + { 'response' => mock_reply(input['message'], 'other'), 'category' => 'other' } +end + +# > Step 03 Router Task +ROUTER_TASK = HATCHET.durable_task(name: 'MessageRouter', execution_timeout: '2m') do |input, _ctx| + classification = CLASSIFY_TASK.run('message' => input['message']) + + case classification['category'] + when 'support' + SUPPORT_TASK.run('message' => input['message']) + when 'sales' + SALES_TASK.run('message' => input['message']) + else + DEFAULT_TASK.run('message' => input['message']) + end +end + +def main + # > Step 04 Run Worker + worker = HATCHET.worker('routing-worker', slots: 5, + workflows: [CLASSIFY_TASK, SUPPORT_TASK, SALES_TASK, DEFAULT_TASK, ROUTER_TASK]) + worker.start +end + +main if __FILE__ == $PROGRAM_NAME diff --git a/examples/ruby/guides/scheduled_jobs/trigger.rb b/examples/ruby/guides/scheduled_jobs/trigger.rb new file mode 100644 index 
0000000000..4a733d406f --- /dev/null +++ b/examples/ruby/guides/scheduled_jobs/trigger.rb @@ -0,0 +1,10 @@ +# frozen_string_literal: true + +require 'hatchet-sdk' + +HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET) + +# > Step 02 Schedule One Time +# Schedule a one-time run at a specific time. +run_at = Time.now + 3600 +HATCHET.scheduled.create(workflow_name: 'ScheduledWorkflow', trigger_at: run_at, input: {}) diff --git a/examples/ruby/guides/scheduled_jobs/worker.rb b/examples/ruby/guides/scheduled_jobs/worker.rb new file mode 100644 index 0000000000..e97d472859 --- /dev/null +++ b/examples/ruby/guides/scheduled_jobs/worker.rb @@ -0,0 +1,21 @@ +# frozen_string_literal: true + +require 'hatchet-sdk' + +HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET) + +# > Step 01 Define Cron Task +CRON_WF = HATCHET.workflow(name: 'ScheduledWorkflow', on_crons: ['0 * * * *']) + +CRON_WF.task(:run_scheduled_job) do |_input, _ctx| + { 'status' => 'completed', 'job' => 'maintenance' } +end + + +def main + # > Step 03 Run Worker + worker = HATCHET.worker('scheduled-worker', workflows: [CRON_WF]) + worker.start +end + +main if __FILE__ == $PROGRAM_NAME diff --git a/examples/ruby/guides/streaming/client.rb b/examples/ruby/guides/streaming/client.rb new file mode 100644 index 0000000000..7fe5c1382e --- /dev/null +++ b/examples/ruby/guides/streaming/client.rb @@ -0,0 +1,14 @@ +# frozen_string_literal: true + +require 'hatchet-sdk' + +HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET) + +# > Step 03 Subscribe Client +# Client triggers the task and subscribes to the stream. 
+def run_and_subscribe + run = HATCHET.runs.create(workflow_name: 'stream-example', input: {}) + HATCHET.runs.subscribe_to_stream(run.run_id) do |chunk| + puts chunk + end +end diff --git a/examples/ruby/guides/streaming/worker.rb b/examples/ruby/guides/streaming/worker.rb new file mode 100644 index 0000000000..641960b78b --- /dev/null +++ b/examples/ruby/guides/streaming/worker.rb @@ -0,0 +1,31 @@ +# frozen_string_literal: true + +require 'hatchet-sdk' + +HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET) + +# > Step 01 Define Streaming Task +STREAM_TASK = HATCHET.task(name: 'stream-example') do |_input, ctx| + 5.times do |i| + ctx.put_stream("chunk-#{i}") + sleep 0.5 + end + { 'status' => 'done' } +end + + +# > Step 02 Emit Chunks +def emit_chunks(ctx) + 5.times do |i| + ctx.put_stream("chunk-#{i}") + sleep 0.5 + end +end + +def main + # > Step 04 Run Worker + worker = HATCHET.worker('streaming-worker', workflows: [STREAM_TASK]) + worker.start +end + +main if __FILE__ == $PROGRAM_NAME diff --git a/examples/ruby/guides/web_scraping/mock_scraper.rb b/examples/ruby/guides/web_scraping/mock_scraper.rb new file mode 100644 index 0000000000..5e4161cd93 --- /dev/null +++ b/examples/ruby/guides/web_scraping/mock_scraper.rb @@ -0,0 +1,19 @@ +# frozen_string_literal: true + +require 'time' + +def mock_scrape(url) + { + 'url' => url, + 'title' => "Page: #{url}", + 'content' => "Mock scraped content from #{url}. 
In production, use Firecrawl, Browserbase, or Playwright here.", + 'scraped_at' => Time.now.utc.iso8601 + } +end + +def mock_extract(content) + { + 'summary' => content[0, 80], + 'word_count' => content.split.size.to_s + } +end diff --git a/examples/ruby/guides/web_scraping/worker.rb b/examples/ruby/guides/web_scraping/worker.rb new file mode 100644 index 0000000000..104b54e699 --- /dev/null +++ b/examples/ruby/guides/web_scraping/worker.rb @@ -0,0 +1,67 @@ +# frozen_string_literal: true + +require 'hatchet-sdk' +require_relative 'mock_scraper' + +HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET) + +SCRAPE_WF = HATCHET.workflow(name: 'ScrapeUrl') +PROCESS_WF = HATCHET.workflow(name: 'ProcessContent') + +# > Step 01 Define Scrape Task +SCRAPE_WF.task(:scrape_url, execution_timeout: '2m', retries: 2) do |input, _ctx| + mock_scrape(input['url']) +end + +# > Step 02 Process Content +PROCESS_WF.task(:process_content) do |input, _ctx| + content = input['content'] + links = content.scan(%r{https?://[^\s<>"']+}) + summary = content[0, 200].strip + word_count = content.split.size + { 'summary' => summary, 'word_count' => word_count, 'links' => links } +end + +# > Step 03 Cron Workflow +CRON_WF = HATCHET.workflow(name: 'WebScrapeWorkflow', on_crons: ['0 */6 * * *']) + +CRON_WF.task(:scheduled_scrape) do |_input, _ctx| + urls = %w[ + https://example.com/pricing + https://example.com/blog + https://example.com/docs + ] + + results = urls.map do |url| + scraped = SCRAPE_WF.run('url' => url) + processed = PROCESS_WF.run('url' => url, 'content' => scraped['content']) + { 'url' => url }.merge(processed) + end + { 'refreshed' => results.size, 'results' => results } +end + +# > Step 04 Rate Limited Scrape +SCRAPE_RATE_LIMIT_KEY = 'scrape-rate-limit' + +RATE_LIMITED_WF = HATCHET.workflow(name: 'RateLimitedScrape') + +RATE_LIMITED_WF.task( + :rate_limited_scrape, + execution_timeout: '2m', + retries: 2, + rate_limits: [Hatchet::RateLimit.new(static_key: 
SCRAPE_RATE_LIMIT_KEY, units: 1)] +) do |input, _ctx| + mock_scrape(input['url']) +end + +def main + # > Step 05 Run Worker + HATCHET.rate_limits.put(SCRAPE_RATE_LIMIT_KEY, 10, :minute) + + worker = HATCHET.worker('web-scraping-worker', + slots: 5, + workflows: [SCRAPE_WF, PROCESS_WF, CRON_WF, RATE_LIMITED_WF]) + worker.start +end + +main if __FILE__ == $PROGRAM_NAME diff --git a/examples/ruby/guides/webhook_processing/worker.rb b/examples/ruby/guides/webhook_processing/worker.rb new file mode 100644 index 0000000000..bb3f617cd0 --- /dev/null +++ b/examples/ruby/guides/webhook_processing/worker.rb @@ -0,0 +1,35 @@ +# frozen_string_literal: true + +require 'hatchet-sdk' + +HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET) + +# > Step 01 Define Webhook Task +PROCESS_WEBHOOK = HATCHET.task( + name: 'process-webhook', + on_events: ['webhook:stripe', 'webhook:github'] +) do |input, _ctx| + { 'processed' => input['event_id'], 'type' => input['type'] } +end + + +# > Step 02 Register Webhook +def forward_webhook(event_key, payload) + HATCHET.event.push(event_key, payload) +end +# forward_webhook("webhook:stripe", { "event_id" => "evt_123", "type" => "payment", "data" => {} }) + +# > Step 03 Process Payload +def validate_and_process(input) + raise 'event_id required for deduplication' if input['event_id'].to_s.empty? 
+ + { 'processed' => input['event_id'], 'type' => input['type'] } +end + +def main + # > Step 04 Run Worker + worker = HATCHET.worker('webhook-worker', workflows: [PROCESS_WEBHOOK]) + worker.start +end + +main if __FILE__ == $PROGRAM_NAME diff --git a/examples/typescript/guides/ai-agents/mock-agent.ts b/examples/typescript/guides/ai-agents/mock-agent.ts new file mode 100644 index 0000000000..3bbc2b559d --- /dev/null +++ b/examples/typescript/guides/ai-agents/mock-agent.ts @@ -0,0 +1,29 @@ +/** Mock LLM and tools - no external API dependencies */ + +let callCount = 0; + +export interface LLMResponse { + content: string; + toolCalls: Array<{ name: string; args: Record<string, unknown> }>; + done: boolean; +} + +export function callLlm(messages: Array<{ role: string; content: string }>): LLMResponse { + callCount += 1; + if (callCount === 1) { + return { + content: '', + toolCalls: [{ name: 'get_weather', args: { location: 'SF' } }], + done: false, + }; + } + return { content: "It's 72°F and sunny in SF.", toolCalls: [], done: true }; +} + +export function runTool(name: string, args: Record<string, unknown>): string { + if (name === 'get_weather') { + const loc = String(args?.location ?? 
'unknown'); + return `Weather in ${loc}: 72°F, sunny`; + } + return `Unknown tool: ${name}`; +} diff --git a/examples/typescript/guides/ai-agents/worker.ts b/examples/typescript/guides/ai-agents/worker.ts new file mode 100644 index 0000000000..900fb56902 --- /dev/null +++ b/examples/typescript/guides/ai-agents/worker.ts @@ -0,0 +1,15 @@ +import { hatchet } from '../../hatchet-client'; +import { agentTask, streamingAgentTask } from './workflow'; + +async function main() { + // > Step 04 Run Worker + const worker = await hatchet.worker('agent-worker', { + workflows: [agentTask, streamingAgentTask], + slots: 5, + }); + await worker.start(); +} + +if (require.main === module) { + main(); +} diff --git a/examples/typescript/guides/ai-agents/workflow.ts b/examples/typescript/guides/ai-agents/workflow.ts new file mode 100644 index 0000000000..33539998f4 --- /dev/null +++ b/examples/typescript/guides/ai-agents/workflow.ts @@ -0,0 +1,50 @@ +import { ConcurrencyLimitStrategy } from '@hatchet-dev/typescript-sdk/protoc/v1/workflows'; +import { hatchet } from '../../hatchet-client'; +import { callLlm, runTool } from './mock-agent'; + +// > Step 01 Define Agent Task +export const agentTask = hatchet.durableTask({ + name: 'reasoning-loop-agent', + executionTimeout: '30m', + concurrency: { + expression: "input.session_id != null ? string(input.session_id) : 'constant'", + maxRuns: 1, + limitStrategy: ConcurrencyLimitStrategy.CANCEL_IN_PROGRESS, + }, + fn: async (input) => { + const query = (input as { query?: string })?.query ?? 
'Hello'; + return agentReasoningLoop(query); + }, +}); + +// > Step 02 Reasoning Loop +async function agentReasoningLoop(query: string) { + const messages: Array<{ role: string; content: string }> = [{ role: 'user', content: query }]; + for (let i = 0; i < 10; i++) { + const resp = callLlm(messages); + if (resp.done) return { response: resp.content }; + for (const tc of resp.toolCalls) { + const result = runTool(tc.name, tc.args); + messages.push({ role: 'tool', content: result }); + } + } + return { response: 'Max iterations reached' }; +} + +// > Step 03 Stream Response +export const streamingAgentTask = hatchet.durableTask({ + name: 'streaming-agent-task', + executionTimeout: '30m', + concurrency: { + expression: "'constant'", + maxRuns: 1, + limitStrategy: ConcurrencyLimitStrategy.CANCEL_IN_PROGRESS, + }, + fn: async (_, ctx) => { + const tokens = ['Hello', ' ', 'world', '!']; + for (const t of tokens) { + ctx.putStream(t); + } + return { done: true }; + }, +}); diff --git a/examples/typescript/guides/batch-processing/worker.ts b/examples/typescript/guides/batch-processing/worker.ts new file mode 100644 index 0000000000..f8ddae7135 --- /dev/null +++ b/examples/typescript/guides/batch-processing/worker.ts @@ -0,0 +1,15 @@ +import { hatchet } from '../../hatchet-client'; +import { parentTask, childTask } from './workflow'; + +async function main() { + // > Step 04 Run Worker + const worker = await hatchet.worker('batch-worker', { + workflows: [parentTask, childTask], + slots: 20, + }); + await worker.start(); +} + +if (require.main === module) { + main(); +} diff --git a/examples/typescript/guides/batch-processing/workflow.ts b/examples/typescript/guides/batch-processing/workflow.ts new file mode 100644 index 0000000000..92305ff697 --- /dev/null +++ b/examples/typescript/guides/batch-processing/workflow.ts @@ -0,0 +1,30 @@ +import { hatchet } from '../../hatchet-client'; + +type BatchInput = { items: string[] }; +type ItemInput = { item_id: string }; + +const 
childTask = hatchet.task({ + name: 'process-item', + fn: async (input) => ({ + status: 'done', + item_id: input.item_id, + }), +}); + +// > Step 01 Define Parent Task +const parentTask = hatchet.durableTask({ + name: 'spawn-children', + fn: async (input) => { + const results = await Promise.all( + input.items.map((itemId) => childTask.run({ item_id: itemId })) + ); + return { processed: results.length, results }; + }, +}); + +// > Step 03 Process Item +function processItem(input: ItemInput) { + return { status: 'done', item_id: input.item_id }; +} + +export { parentTask, childTask }; diff --git a/examples/typescript/guides/document-processing/mock-ocr.ts b/examples/typescript/guides/document-processing/mock-ocr.ts new file mode 100644 index 0000000000..552a29bc18 --- /dev/null +++ b/examples/typescript/guides/document-processing/mock-ocr.ts @@ -0,0 +1,5 @@ +/** Mock OCR - no external dependencies */ + +export function parseDocument(content: Uint8Array): string { + return `Parsed text from ${content.length} bytes`; +} diff --git a/examples/typescript/guides/document-processing/worker.ts b/examples/typescript/guides/document-processing/worker.ts new file mode 100644 index 0000000000..30304c2c32 --- /dev/null +++ b/examples/typescript/guides/document-processing/worker.ts @@ -0,0 +1,14 @@ +import { hatchet } from '../../hatchet-client'; +import { docWf } from './workflow'; + +async function main() { + // > Step 04 Run Worker + const worker = await hatchet.worker('document-worker', { + workflows: [docWf], + }); + await worker.start(); +} + +if (require.main === module) { + main(); +} diff --git a/examples/typescript/guides/document-processing/workflow.ts b/examples/typescript/guides/document-processing/workflow.ts new file mode 100644 index 0000000000..8bfedb0d0d --- /dev/null +++ b/examples/typescript/guides/document-processing/workflow.ts @@ -0,0 +1,38 @@ +import { hatchet } from '../../hatchet-client'; +import { parseDocument } from './mock-ocr'; + +type DocInput = { 
doc_id: string; content: Uint8Array }; + +// > Step 01 Define DAG +const docWf = hatchet.workflow({ name: 'DocumentPipeline' }); + +const ingest = docWf.task({ + name: 'ingest', + fn: async (input) => ({ doc_id: input.doc_id, content: input.content }), +}); + + +// > Step 02 Parse Stage +const parse = docWf.task({ + name: 'parse', + parents: [ingest], + fn: async (input, ctx) => { + const ingested = await ctx.parentOutput(ingest); + const text = parseDocument(ingested.content); + return { doc_id: input.doc_id, text }; + }, +}); + + +// > Step 03 Extract Stage +const extract = docWf.task({ + name: 'extract', + parents: [parse], + fn: async (input, ctx) => { + const parsed = await ctx.parentOutput(parse); + return { doc_id: parsed.doc_id, entities: ['entity1', 'entity2'] }; + }, +}); + + +export { docWf }; diff --git a/examples/typescript/guides/evaluator-optimizer/mock-llm.ts b/examples/typescript/guides/evaluator-optimizer/mock-llm.ts new file mode 100644 index 0000000000..23327e9a09 --- /dev/null +++ b/examples/typescript/guides/evaluator-optimizer/mock-llm.ts @@ -0,0 +1,16 @@ +let generateCount = 0; + +export function mockGenerate(prompt: string): string { + generateCount++; + if (generateCount === 1) { + return 'Check out our product! Buy now!'; + } + return 'Discover how our tool saves teams 10 hours/week. Try it free.'; +} + +export function mockEvaluate(draft: string): { score: number; feedback: string } { + if (draft.length < 40) { + return { score: 0.4, feedback: 'Too short and pushy. Add a specific benefit and soften the CTA.' }; + } + return { score: 0.9, feedback: 'Clear value prop, appropriate tone.' 
}; +} diff --git a/examples/typescript/guides/evaluator-optimizer/worker.ts b/examples/typescript/guides/evaluator-optimizer/worker.ts new file mode 100644 index 0000000000..797bd45309 --- /dev/null +++ b/examples/typescript/guides/evaluator-optimizer/worker.ts @@ -0,0 +1,15 @@ +import { hatchet } from '../../hatchet-client'; +import { generatorTask, evaluatorTask, optimizerTask } from './workflow'; + +async function main() { + // > Step 03 Run Worker + const worker = await hatchet.worker('evaluator-optimizer-worker', { + workflows: [generatorTask, evaluatorTask, optimizerTask], + slots: 5, + }); + await worker.start(); +} + +if (require.main === module) { + main(); +} diff --git a/examples/typescript/guides/evaluator-optimizer/workflow.ts b/examples/typescript/guides/evaluator-optimizer/workflow.ts new file mode 100644 index 0000000000..288ee23983 --- /dev/null +++ b/examples/typescript/guides/evaluator-optimizer/workflow.ts @@ -0,0 +1,70 @@ +import { hatchet } from '../../hatchet-client'; +import { mockGenerate, mockEvaluate } from './mock-llm'; + +type GeneratorInput = { + topic: string; + audience: string; + previousDraft?: string; + feedback?: string; +}; + +type EvaluatorInput = { + draft: string; + topic: string; + audience: string; +}; + +// > Step 01 Define Tasks +const generatorTask = hatchet.task({ + name: 'generate-draft', + fn: async (input: GeneratorInput) => { + const prompt = input.feedback + ? `Improve this draft.\n\nDraft: ${input.previousDraft}\nFeedback: ${input.feedback}` + : `Write a social media post about "${input.topic}" for ${input.audience}. 
Under 100 words.`; + return { draft: mockGenerate(prompt) }; + }, +}); + +const evaluatorTask = hatchet.task({ + name: 'evaluate-draft', + fn: async (input: EvaluatorInput) => { + return mockEvaluate(input.draft); + }, +}); + +// > Step 02 Optimization Loop +const optimizerTask = hatchet.durableTask({ + name: 'evaluator-optimizer', + executionTimeout: '5m', + fn: async (input: { topic: string; audience: string }) => { + const maxIterations = 3; + const threshold = 0.8; + let draft = ''; + let feedback = ''; + + for (let i = 0; i < maxIterations; i++) { + const generated = await generatorTask.run({ + topic: input.topic, + audience: input.audience, + previousDraft: draft || undefined, + feedback: feedback || undefined, + }); + draft = generated.draft; + + const evaluation = await evaluatorTask.run({ + draft, + topic: input.topic, + audience: input.audience, + }); + + if (evaluation.score >= threshold) { + return { draft, iterations: i + 1, score: evaluation.score }; + } + feedback = evaluation.feedback; + } + + return { draft, iterations: maxIterations, score: -1 }; + }, +}); + +export { generatorTask, evaluatorTask, optimizerTask }; diff --git a/examples/typescript/guides/event-driven/trigger.ts b/examples/typescript/guides/event-driven/trigger.ts new file mode 100644 index 0000000000..8e84694dcc --- /dev/null +++ b/examples/typescript/guides/event-driven/trigger.ts @@ -0,0 +1,8 @@ +import { hatchet } from '../../hatchet-client'; + +// > Step 03 Push Event +// Push an event to trigger the workflow. Use the same key as onEvents. 
+hatchet.event.push('order:created', { + message: 'Order #1234', + source: 'webhook', +}); diff --git a/examples/typescript/guides/event-driven/worker.ts b/examples/typescript/guides/event-driven/worker.ts new file mode 100644 index 0000000000..26ca92db39 --- /dev/null +++ b/examples/typescript/guides/event-driven/worker.ts @@ -0,0 +1,14 @@ +import { hatchet } from '../../hatchet-client'; +import { eventWf } from './workflow'; + +async function main() { + // > Step 04 Run Worker + const worker = await hatchet.worker('event-driven-worker', { + workflows: [eventWf], + }); + await worker.start(); +} + +if (require.main === module) { + main(); +} diff --git a/examples/typescript/guides/event-driven/workflow.ts b/examples/typescript/guides/event-driven/workflow.ts new file mode 100644 index 0000000000..945816625a --- /dev/null +++ b/examples/typescript/guides/event-driven/workflow.ts @@ -0,0 +1,23 @@ +import { hatchet } from '../../hatchet-client'; + +type EventInput = { message: string; source?: string }; + +// > Step 01 Define Event Task +const eventWf = hatchet.workflow({ + name: 'EventDrivenWorkflow', + onEvents: ['order:created', 'user:signup'], +}); + +eventWf.task({ + name: 'process-event', + fn: async (input) => ({ + processed: input.message, + source: input.source ?? 'api', + }), +}); + +// > Step 02 Register Event Trigger +// Push an event from your app to trigger the workflow. Use the same key as onEvents. +// hatchet.event.push('order:created', { message: 'Order #1234', source: 'webhook' }); + +export { eventWf }; diff --git a/examples/typescript/guides/human-in-the-loop/trigger.ts b/examples/typescript/guides/human-in-the-loop/trigger.ts new file mode 100644 index 0000000000..42d1a19c8a --- /dev/null +++ b/examples/typescript/guides/human-in-the-loop/trigger.ts @@ -0,0 +1,10 @@ +import { hatchet } from '../../hatchet-client'; + +// > Step 03 Push Approval Event +// Include the runId so the event matches the specific task waiting for it. 
+export async function pushApproval(runId: string, approved: boolean, reason = '') { + await hatchet.event.push('approval:decision', { runId, approved, reason }); +} + +// Approve: await pushApproval('run-id-from-ui', true); +// Reject: await pushApproval('run-id-from-ui', false, 'needs review'); diff --git a/examples/typescript/guides/human-in-the-loop/worker.ts b/examples/typescript/guides/human-in-the-loop/worker.ts new file mode 100644 index 0000000000..3c82bb682e --- /dev/null +++ b/examples/typescript/guides/human-in-the-loop/worker.ts @@ -0,0 +1,14 @@ +import { hatchet } from '../../hatchet-client'; +import { approvalTask } from './workflow'; + +async function main() { + // > Step 04 Run Worker + const worker = await hatchet.worker('human-in-the-loop-worker', { + workflows: [approvalTask], + }); + await worker.start(); +} + +if (require.main === module) { + main(); +} diff --git a/examples/typescript/guides/human-in-the-loop/workflow.ts b/examples/typescript/guides/human-in-the-loop/workflow.ts new file mode 100644 index 0000000000..326a0dce83 --- /dev/null +++ b/examples/typescript/guides/human-in-the-loop/workflow.ts @@ -0,0 +1,27 @@ +import { DurableContext } from '@hatchet-dev/typescript-sdk'; +import { hatchet } from '../../hatchet-client'; + +const APPROVAL_EVENT_KEY = 'approval:decision'; + +// > Step 02 Wait For Event +function waitForApproval(ctx: DurableContext) { + const runId = ctx.workflowRunId(); + return ctx.waitFor({ + eventKey: APPROVAL_EVENT_KEY, + expression: `input.runId == '${runId}'`, + }); +} + +// > Step 01 Define Approval Task +export const approvalTask = hatchet.durableTask({ + name: 'approval-task', + executionTimeout: '30m', + fn: async (_, ctx) => { + const proposedAction = { action: 'send_email', to: 'user@example.com' }; + const approval = await waitForApproval(ctx); + if (approval?.approved) { + return { status: 'approved', action: proposedAction }; + } + return { status: 'rejected', reason: approval?.reason ?? 
'' }; + }, +}); diff --git a/examples/typescript/guides/integrations/embedding-cohere.ts b/examples/typescript/guides/integrations/embedding-cohere.ts new file mode 100644 index 0000000000..ac7d8a5dfb --- /dev/null +++ b/examples/typescript/guides/integrations/embedding-cohere.ts @@ -0,0 +1,16 @@ +// Third-party integration - requires: pnpm add cohere-ai +// See: /guides/rag-and-indexing + +import Cohere from 'cohere-ai'; + +const client = new Cohere(); + +// > Cohere embedding usage +export async function embed(text: string): Promise { + const r = await client.embed({ + texts: [text], + model: 'embed-english-v3.0', + inputType: 'search_document', + }); + return r.embeddings[0] ?? []; +} diff --git a/examples/typescript/guides/integrations/embedding-openai.ts b/examples/typescript/guides/integrations/embedding-openai.ts new file mode 100644 index 0000000000..cd0f1dea22 --- /dev/null +++ b/examples/typescript/guides/integrations/embedding-openai.ts @@ -0,0 +1,15 @@ +// Third-party integration - requires: pnpm add openai +// See: /guides/rag-and-indexing + +import OpenAI from 'openai'; + +const client = new OpenAI(); + +// > OpenAI embedding usage +export async function embed(text: string): Promise { + const r = await client.embeddings.create({ + model: 'text-embedding-3-small', + input: text, + }); + return r.data[0]?.embedding ?? 
[]; +} diff --git a/examples/typescript/guides/integrations/llm-anthropic.ts b/examples/typescript/guides/integrations/llm-anthropic.ts new file mode 100644 index 0000000000..18bd7fac00 --- /dev/null +++ b/examples/typescript/guides/integrations/llm-anthropic.ts @@ -0,0 +1,28 @@ +// Third-party integration - requires: pnpm add @anthropic-ai/sdk +// See: /guides/ai-agents + +import Anthropic from '@anthropic-ai/sdk'; + +const client = new Anthropic(); + +// > Anthropic usage +export async function complete(messages: Array<{ role: string; content: string }>) { + const resp = await client.messages.create({ + model: 'claude-3-5-haiku-20241022', + max_tokens: 1024, + messages: messages.map((m) => ({ role: m.role as 'user' | 'assistant', content: m.content })), + }); + const toolUse = resp.content.find((b) => b.type === 'tool_use'); + if (toolUse && toolUse.type === 'tool_use') { + return { + content: '', + toolCalls: [{ name: toolUse.name, args: toolUse.input }], + done: false, + }; + } + const text = resp.content + .filter((b): b is { type: 'text'; text: string } => b.type === 'text') + .map((b) => b.text) + .join(''); + return { content: text, toolCalls: [], done: true }; +} diff --git a/examples/typescript/guides/integrations/llm-groq.ts b/examples/typescript/guides/integrations/llm-groq.ts new file mode 100644 index 0000000000..2b7e79f046 --- /dev/null +++ b/examples/typescript/guides/integrations/llm-groq.ts @@ -0,0 +1,39 @@ +// Third-party integration - requires: pnpm add groq-sdk +// See: /guides/ai-agents + +import Groq from 'groq-sdk'; + +const client = new Groq(); + +// > Groq usage +export async function complete(messages: Array<{ role: string; content: string }>) { + const r = await client.chat.completions.create({ + model: 'llama-3.3-70b-versatile', + messages: messages as Groq.ChatCompletionMessageParam[], + tool_choice: 'auto', + tools: [ + { + type: 'function', + function: { + name: 'get_weather', + description: 'Get weather for a location', + 
parameters: { + type: 'object', + properties: { location: { type: 'string' } }, + required: ['location'], + }, + }, + }, + ], + }); + const msg = r.choices[0]?.message; + const toolCalls = (msg?.tool_calls ?? []).map((tc) => ({ + name: tc.function?.name ?? '', + args: JSON.parse(tc.function?.arguments ?? '{}'), + })); + return { + content: msg?.content ?? '', + toolCalls, + done: toolCalls.length === 0, + }; +} diff --git a/examples/typescript/guides/integrations/llm-openai.ts b/examples/typescript/guides/integrations/llm-openai.ts new file mode 100644 index 0000000000..f67867c338 --- /dev/null +++ b/examples/typescript/guides/integrations/llm-openai.ts @@ -0,0 +1,39 @@ +// Third-party integration - requires: pnpm add openai +// See: /guides/ai-agents + +import OpenAI from 'openai'; + +const client = new OpenAI(); + +// > OpenAI usage +export async function complete(messages: Array<{ role: string; content: string }>) { + const r = await client.chat.completions.create({ + model: 'gpt-4o-mini', + messages: messages as OpenAI.ChatCompletionMessageParam[], + tool_choice: 'auto', + tools: [ + { + type: 'function', + function: { + name: 'get_weather', + description: 'Get weather for a location', + parameters: { + type: 'object', + properties: { location: { type: 'string' } }, + required: ['location'], + }, + }, + }, + ], + }); + const msg = r.choices[0]?.message; + const toolCalls = (msg?.tool_calls ?? []).map((tc) => ({ + name: tc.function?.name ?? '', + args: JSON.parse(tc.function?.arguments ?? '{}'), + })); + return { + content: msg?.content ?? 
'', + toolCalls, + done: toolCalls.length === 0, + }; +} diff --git a/examples/typescript/guides/integrations/llm-vercel-ai-sdk.ts b/examples/typescript/guides/integrations/llm-vercel-ai-sdk.ts new file mode 100644 index 0000000000..ef82cd1275 --- /dev/null +++ b/examples/typescript/guides/integrations/llm-vercel-ai-sdk.ts @@ -0,0 +1,32 @@ +// Third-party integration - requires: pnpm add ai @ai-sdk/openai +// See: /guides/ai-agents +// Vercel AI SDK: unified interface for OpenAI, Anthropic, Google, etc. + +import { generateText, tool } from 'ai'; +import { openai } from '@ai-sdk/openai'; +import { z } from 'zod'; + +// > Vercel AI SDK usage +export async function complete(messages: Array<{ role: string; content: string }>) { + const tools = { + get_weather: tool({ + description: 'Get weather for a location', + parameters: z.object({ location: z.string() }), + execute: async ({ location }) => `Weather in ${location}: 72°F, sunny`, + }), + }; + const { text, toolCalls } = await generateText({ + model: openai('gpt-4o-mini'), + messages: messages.map((m) => ({ + role: m.role as 'user' | 'assistant' | 'system', + content: m.content, + })), + maxSteps: 5, // SDK runs tool loop internally + tools, + }); + return { + content: text, + tool_calls: toolCalls.map((tc) => ({ name: tc.toolName, args: tc.args })), + done: true, // maxSteps handles full agent loop + }; +} diff --git a/examples/typescript/guides/integrations/ocr-google-vision.ts b/examples/typescript/guides/integrations/ocr-google-vision.ts new file mode 100644 index 0000000000..ec06fabccc --- /dev/null +++ b/examples/typescript/guides/integrations/ocr-google-vision.ts @@ -0,0 +1,12 @@ +// Third-party integration - requires: pnpm add @google-cloud/vision +// See: /guides/document-processing + +import { ImageAnnotatorClient } from '@google-cloud/vision'; + +const client = new ImageAnnotatorClient(); + +// > Google Vision usage +export async function parseDocument(content: Buffer): Promise { + const [result] = await 
client.documentTextDetection({ image: { content } }); + return result.fullTextAnnotation?.text ?? ''; +} diff --git a/examples/typescript/guides/integrations/ocr-tesseract.ts b/examples/typescript/guides/integrations/ocr-tesseract.ts new file mode 100644 index 0000000000..77b0296cde --- /dev/null +++ b/examples/typescript/guides/integrations/ocr-tesseract.ts @@ -0,0 +1,10 @@ +// Third-party integration - requires: pnpm add tesseract.js +// See: /guides/document-processing + +import Tesseract from 'tesseract.js'; + +// > Tesseract usage +export async function parseDocument(content: Buffer): Promise<string> { + const { data } = await Tesseract.recognize(content); + return data.text; +} diff --git a/examples/typescript/guides/integrations/scraper-browserbase.ts b/examples/typescript/guides/integrations/scraper-browserbase.ts new file mode 100644 index 0000000000..a490167c95 --- /dev/null +++ b/examples/typescript/guides/integrations/scraper-browserbase.ts @@ -0,0 +1,20 @@ +// Third-party integration - requires: pnpm add @browserbasehq/sdk playwright +// See: /guides/web-scraping + +import Browserbase from '@browserbasehq/sdk'; +import { chromium } from 'playwright'; + +const bb = new Browserbase({ apiKey: process.env.BROWSERBASE_API_KEY!
}); + +// > Browserbase usage +export async function scrapeUrl(url: string) { + const session = await bb.sessions.create({ + projectId: process.env.BROWSERBASE_PROJECT_ID!, + }); + const browser = await chromium.connectOverCDP(session.connectUrl); + const page = browser.contexts()[0].pages()[0]; + await page.goto(url); + const content = await page.content(); + await browser.close(); + return { url, content }; +} diff --git a/examples/typescript/guides/integrations/scraper-firecrawl.ts b/examples/typescript/guides/integrations/scraper-firecrawl.ts new file mode 100644 index 0000000000..e650a5310e --- /dev/null +++ b/examples/typescript/guides/integrations/scraper-firecrawl.ts @@ -0,0 +1,16 @@ +// Third-party integration - requires: pnpm add @mendable/firecrawl-js +// See: /guides/web-scraping + +import FirecrawlApp from '@mendable/firecrawl-js'; + +const firecrawl = new FirecrawlApp({ apiKey: process.env.FIRECRAWL_API_KEY! }); + +// > Firecrawl usage +export async function scrapeUrl(url: string) { + const result = await firecrawl.scrapeUrl(url, { formats: ['markdown'] }); + return { + url, + content: result.markdown, + metadata: result.metadata, + }; +} diff --git a/examples/typescript/guides/integrations/scraper-openai.ts b/examples/typescript/guides/integrations/scraper-openai.ts new file mode 100644 index 0000000000..f32db08452 --- /dev/null +++ b/examples/typescript/guides/integrations/scraper-openai.ts @@ -0,0 +1,16 @@ +// Third-party integration - requires: pnpm add openai +// See: /guides/web-scraping + +import OpenAI from 'openai'; + +const openai = new OpenAI(); + +// > OpenAI web search usage +export async function searchAndExtract(query: string) { + const response = await openai.responses.create({ + model: 'gpt-4o-mini', + tools: [{ type: 'web_search' }], + input: query, + }); + return { query, content: response.output_text }; +} diff --git a/examples/typescript/guides/integrations/scraper-playwright.ts 
b/examples/typescript/guides/integrations/scraper-playwright.ts new file mode 100644 index 0000000000..0ef39b53d8 --- /dev/null +++ b/examples/typescript/guides/integrations/scraper-playwright.ts @@ -0,0 +1,14 @@ +// Third-party integration - requires: pnpm add playwright +// See: /guides/web-scraping + +import { chromium } from 'playwright'; + +// > Playwright usage +export async function scrapeUrl(url: string) { + const browser = await chromium.launch({ headless: true }); + const page = await browser.newPage(); + await page.goto(url); + const content = await page.content(); + await browser.close(); + return { url, content }; +} diff --git a/examples/typescript/guides/llm-pipelines/mock-llm.ts b/examples/typescript/guides/llm-pipelines/mock-llm.ts new file mode 100644 index 0000000000..43bd30b59d --- /dev/null +++ b/examples/typescript/guides/llm-pipelines/mock-llm.ts @@ -0,0 +1,5 @@ +/** Mock LLM - no external API dependencies */ + +export function generate(prompt: string): { content: string; valid: boolean } { + return { content: `Generated for: ${prompt.slice(0, 50)}...`, valid: true }; +} diff --git a/examples/typescript/guides/llm-pipelines/worker.ts b/examples/typescript/guides/llm-pipelines/worker.ts new file mode 100644 index 0000000000..ec59942903 --- /dev/null +++ b/examples/typescript/guides/llm-pipelines/worker.ts @@ -0,0 +1,14 @@ +import { hatchet } from '../../hatchet-client'; +import { llmWf } from './workflow'; + +async function main() { + // > Step 04 Run Worker + const worker = await hatchet.worker('llm-pipeline-worker', { + workflows: [llmWf], + }); + await worker.start(); +} + +if (require.main === module) { + main(); +} diff --git a/examples/typescript/guides/llm-pipelines/workflow.ts b/examples/typescript/guides/llm-pipelines/workflow.ts new file mode 100644 index 0000000000..06d230af8b --- /dev/null +++ b/examples/typescript/guides/llm-pipelines/workflow.ts @@ -0,0 +1,33 @@ +import { hatchet } from '../../hatchet-client'; +import { generate 
} from './mock-llm'; + +type PipelineInput = { prompt: string }; + +// > Step 01 Define Pipeline +const llmWf = hatchet.workflow({ name: 'LLMPipeline' }); + +const promptTask = llmWf.task({ + name: 'prompt-task', + fn: async (input) => ({ prompt: input.prompt }), +}); + + +// > Step 02 Prompt Task +function buildPrompt(userInput: string, context = ''): string { + return `Process the following: ${userInput}${context ? `\nContext: ${context}` : ''}`; +} + +// > Step 03 Validate Task +const generateTask = llmWf.task({ + name: 'generate-task', + parents: [promptTask], + fn: async (input, ctx) => { + const prev = await ctx.parentOutput(promptTask); + const output = generate(prev.prompt); + if (!output.valid) throw new Error('Validation failed'); + return output; + }, +}); + + +export { llmWf }; diff --git a/examples/typescript/guides/multi-agent/mock-llm.ts b/examples/typescript/guides/multi-agent/mock-llm.ts new file mode 100644 index 0000000000..f5648650f6 --- /dev/null +++ b/examples/typescript/guides/multi-agent/mock-llm.ts @@ -0,0 +1,23 @@ +let orchestratorCallCount = 0; + +export interface ToolCallResponse { + done: boolean; + content: string; + toolCall?: { name: string; args: { task: string } }; +} + +export function mockOrchestratorLlm(messages: Array<{ role: string; content: string }>): ToolCallResponse { + orchestratorCallCount++; + switch (orchestratorCallCount) { + case 1: + return { done: false, content: '', toolCall: { name: 'research', args: { task: 'Find key facts about the topic' } } }; + case 2: + return { done: false, content: '', toolCall: { name: 'writing', args: { task: 'Write a summary from the research' } } }; + default: + return { done: true, content: 'Here is the final report combining research and writing.' 
}; + } +} + +export function mockSpecialistLlm(task: string, role: string): string { + return `[${role}] Completed: ${task}`; +} diff --git a/examples/typescript/guides/multi-agent/worker.ts b/examples/typescript/guides/multi-agent/worker.ts new file mode 100644 index 0000000000..ef6c6dd608 --- /dev/null +++ b/examples/typescript/guides/multi-agent/worker.ts @@ -0,0 +1,15 @@ +import { hatchet } from '../../hatchet-client'; +import { researchTask, writingTask, codeTask, orchestrator } from './workflow'; + +async function main() { + // > Step 03 Run Worker + const worker = await hatchet.worker('multi-agent-worker', { + workflows: [researchTask, writingTask, codeTask, orchestrator], + slots: 10, + }); + await worker.start(); +} + +if (require.main === module) { + main(); +} diff --git a/examples/typescript/guides/multi-agent/workflow.ts b/examples/typescript/guides/multi-agent/workflow.ts new file mode 100644 index 0000000000..aa4d547bbe --- /dev/null +++ b/examples/typescript/guides/multi-agent/workflow.ts @@ -0,0 +1,69 @@ +import { hatchet } from '../../hatchet-client'; +import { mockOrchestratorLlm, mockSpecialistLlm } from './mock-llm'; + +type SpecialistInput = { task: string; context?: string }; + +// > Step 01 Specialist Agents +const researchTask = hatchet.durableTask({ + name: 'research-specialist', + executionTimeout: '3m', + fn: async (input: SpecialistInput) => { + return { result: mockSpecialistLlm(input.task, 'research') }; + }, +}); + +const writingTask = hatchet.durableTask({ + name: 'writing-specialist', + executionTimeout: '2m', + fn: async (input: SpecialistInput) => { + return { result: mockSpecialistLlm(input.task, 'writing') }; + }, +}); + +const codeTask = hatchet.durableTask({ + name: 'code-specialist', + executionTimeout: '2m', + fn: async (input: SpecialistInput) => { + return { result: mockSpecialistLlm(input.task, 'code') }; + }, +}); + +// > Step 02 Orchestrator Loop +const specialists: Record = { + research: researchTask, + writing: 
writingTask, + code: codeTask, +}; + +const orchestrator = hatchet.durableTask({ + name: 'multi-agent-orchestrator', + executionTimeout: '15m', + fn: async (input: { goal: string }) => { + const messages: Array<{ role: string; content: string }> = [ + { role: 'user', content: input.goal }, + ]; + + for (let i = 0; i < 10; i++) { + const response = mockOrchestratorLlm(messages); + + if (response.done) return { result: response.content }; + + const specialist = specialists[response.toolCall!.name]; + if (!specialist) throw new Error(`Unknown specialist: ${response.toolCall!.name}`); + + const { result } = await specialist.run({ + task: response.toolCall!.args.task, + context: messages.map((m) => m.content).join('\n'), + }); + + messages.push( + { role: 'assistant', content: `Called ${response.toolCall!.name}` }, + { role: 'tool', content: result } + ); + } + + return { result: 'Max iterations reached' }; + }, +}); + +export { researchTask, writingTask, codeTask, orchestrator }; diff --git a/examples/typescript/guides/package.json b/examples/typescript/guides/package.json new file mode 100644 index 0000000000..ce919099b7 --- /dev/null +++ b/examples/typescript/guides/package.json @@ -0,0 +1,34 @@ +{ + "name": "hatchet-guides-typescript", + "version": "0.0.0", + "private": true, + "description": "Hatchet guide examples (TypeScript) - docs snippets with integration deps", + "scripts": { + "lint:check": "eslint \"**/*.ts\"", + "lint:fix": "eslint . 
--fix" + }, + "dependencies": { + "@hatchet-dev/typescript-sdk": "^1.15.0", + "@anthropic-ai/sdk": "^0.32.1", + "@ai-sdk/openai": "^1.0.0", + "@browserbasehq/sdk": "^2.7.0", + "@google-cloud/vision": "^4.0.0", + "@mendable/firecrawl-js": "^4.15.0", + "ai": "^4.0.0", + "cohere-ai": "^7.0.0", + "groq-sdk": "^0.5.0", + "openai": "^4.0.0", + "playwright": "^1.49.0", + "tesseract.js": "^5.0.0", + "zod": "^3.24.0" + }, + "devDependencies": { + "@types/node": "^22.0.0", + "@typescript-eslint/eslint-plugin": "^6.21.0", + "@typescript-eslint/parser": "^6.21.0", + "eslint": "^8.56.0", + "eslint-config-airbnb-base": "^15.0.0", + "eslint-config-prettier": "^9.0.0", + "typescript": "^5.8.0" + } +} diff --git a/examples/typescript/guides/parallelization/mock-llm.ts b/examples/typescript/guides/parallelization/mock-llm.ts new file mode 100644 index 0000000000..8528634ce2 --- /dev/null +++ b/examples/typescript/guides/parallelization/mock-llm.ts @@ -0,0 +1,15 @@ +export function mockGenerateContent(message: string): string { + return `Here is a helpful response to: ${message}`; +} + +export function mockSafetyCheck(message: string): { safe: boolean; reason: string } { + if (message.toLowerCase().includes('unsafe')) { + return { safe: false, reason: 'Content flagged as potentially unsafe.' }; + } + return { safe: true, reason: 'Content is appropriate.' }; +} + +export function mockEvaluate(content: string): { score: number; approved: boolean } { + const score = content.length > 20 ? 
0.85 : 0.3; + return { score, approved: score >= 0.7 }; +} diff --git a/examples/typescript/guides/parallelization/worker.ts b/examples/typescript/guides/parallelization/worker.ts new file mode 100644 index 0000000000..e897a35986 --- /dev/null +++ b/examples/typescript/guides/parallelization/worker.ts @@ -0,0 +1,15 @@ +import { hatchet } from '../../hatchet-client'; +import { contentTask, safetyTask, evaluateTask, sectioningTask, votingTask } from './workflow'; + +async function main() { + // > Step 04 Run Worker + const worker = await hatchet.worker('parallelization-worker', { + workflows: [contentTask, safetyTask, evaluateTask, sectioningTask, votingTask], + slots: 10, + }); + await worker.start(); +} + +if (require.main === module) { + main(); +} diff --git a/examples/typescript/guides/parallelization/workflow.ts b/examples/typescript/guides/parallelization/workflow.ts new file mode 100644 index 0000000000..a851597b58 --- /dev/null +++ b/examples/typescript/guides/parallelization/workflow.ts @@ -0,0 +1,67 @@ +import { hatchet } from '../../hatchet-client'; +import { mockGenerateContent, mockSafetyCheck, mockEvaluate } from './mock-llm'; + +type MessageInput = { message: string }; + +// > Step 01 Parallel Tasks +const contentTask = hatchet.task({ + name: 'generate-content', + fn: async (input: MessageInput) => { + return { content: mockGenerateContent(input.message) }; + }, +}); + +const safetyTask = hatchet.task({ + name: 'safety-check', + fn: async (input: MessageInput) => { + return mockSafetyCheck(input.message); + }, +}); + +const evaluateTask = hatchet.task({ + name: 'evaluate-content', + fn: async (input: { content: string }) => { + return mockEvaluate(input.content); + }, +}); + +// > Step 02 Sectioning +const sectioningTask = hatchet.durableTask({ + name: 'parallel-sectioning', + executionTimeout: '2m', + fn: async (input: MessageInput) => { + const [content, safety] = await Promise.all([ + contentTask.run(input), + safetyTask.run(input), + ]); + + if 
(!safety.safe) { + return { blocked: true, reason: safety.reason }; + } + return { blocked: false, content: content.content }; + }, +}); + +// > Step 03 Voting +const votingTask = hatchet.durableTask({ + name: 'parallel-voting', + executionTimeout: '3m', + fn: async (input: { content: string }) => { + const votes = await Promise.all([ + evaluateTask.run(input), + evaluateTask.run(input), + evaluateTask.run(input), + ]); + + const approvals = votes.filter((v) => v.approved).length; + const avgScore = votes.reduce((sum, v) => sum + v.score, 0) / votes.length; + + return { + approved: approvals >= 2, + averageScore: avgScore, + votes: votes.length, + }; + }, +}); + +export { contentTask, safetyTask, evaluateTask, sectioningTask, votingTask }; diff --git a/examples/typescript/guides/rag-and-indexing/mock-embedding.ts b/examples/typescript/guides/rag-and-indexing/mock-embedding.ts new file mode 100644 index 0000000000..2fa488c4da --- /dev/null +++ b/examples/typescript/guides/rag-and-indexing/mock-embedding.ts @@ -0,0 +1,5 @@ +/** Mock embedding - no external API dependencies */ + +export function embed(text: string): number[] { + return Array(64).fill(0.1); +} diff --git a/examples/typescript/guides/rag-and-indexing/worker.ts b/examples/typescript/guides/rag-and-indexing/worker.ts new file mode 100644 index 0000000000..07f5820033 --- /dev/null +++ b/examples/typescript/guides/rag-and-indexing/worker.ts @@ -0,0 +1,14 @@ +import { hatchet } from '../../hatchet-client'; +import { ragWf, embedChunkTask, queryTask } from './workflow'; + +async function main() { + // > Step 06 Run Worker + const worker = await hatchet.worker('rag-worker', { + workflows: [ragWf, embedChunkTask, queryTask], + }); + await worker.start(); +} + +if (require.main === module) { + main(); +} diff --git a/examples/typescript/guides/rag-and-indexing/workflow.ts b/examples/typescript/guides/rag-and-indexing/workflow.ts new file mode 100644 index 0000000000..66378e3a2a --- /dev/null +++ 
b/examples/typescript/guides/rag-and-indexing/workflow.ts @@ -0,0 +1,58 @@ +import { hatchet } from '../../hatchet-client'; +import { embed } from './mock-embedding'; + +// > Step 01 Define Workflow +type DocInput = { doc_id: string; content: string }; + +const ragWf = hatchet.workflow({ name: 'RAGPipeline' }); + +// > Step 02 Define Ingest Task +const ingest = ragWf.task({ + name: 'ingest', + fn: async (input) => ({ doc_id: input.doc_id, content: input.content }), +}); + + +// > Step 03 Chunk Task +function chunkContent(content: string, chunkSize = 100): string[] { + const chunks: string[] = []; + for (let i = 0; i < content.length; i += chunkSize) { + chunks.push(content.slice(i, i + chunkSize)); + } + return chunks; +} + +// > Step 04 Embed Task +const embedChunkTask = hatchet.task<{ chunk: string }>({ + name: 'embed-chunk', + fn: async (input) => ({ vector: embed(input.chunk) }), +}); + +const chunkAndEmbed = ragWf.durableTask({ + name: 'chunk-and-embed', + parents: [ingest], + fn: async (input, ctx) => { + const ingested = await ctx.parentOutput(ingest); + const chunks: string[] = []; + for (let i = 0; i < ingested.content.length; i += 100) { + chunks.push(ingested.content.slice(i, i + 100)); + } + const results = await Promise.all(chunks.map((chunk) => embedChunkTask.run({ chunk }))); + return { doc_id: ingested.doc_id, vectors: results.map((r) => r.vector) }; + }, +}); + + +// > Step 05 Query Task +type QueryInput = { query: string; top_k?: number }; + +const queryTask = hatchet.durableTask({ + name: 'rag-query', + fn: async (input) => { + const { vector } = await embedChunkTask.run({ chunk: input.query }); + // Replace with a real vector DB lookup in production + return { query: input.query, vector, results: [] }; + }, +}); + +export { ragWf, embedChunkTask, queryTask }; diff --git a/examples/typescript/guides/routing/mock-classifier.ts b/examples/typescript/guides/routing/mock-classifier.ts new file mode 100644 index 0000000000..0f73943b03 --- /dev/null 
+++ b/examples/typescript/guides/routing/mock-classifier.ts @@ -0,0 +1,17 @@ +export function mockClassify(message: string): string { + const lower = message.toLowerCase(); + if (lower.includes('bug') || lower.includes('error') || lower.includes('help')) return 'support'; + if (lower.includes('price') || lower.includes('buy') || lower.includes('plan')) return 'sales'; + return 'other'; +} + +export function mockReply(message: string, role: string): string { + switch (role) { + case 'support': + return `[Support] I can help with that technical issue. Let me look into: ${message}`; + case 'sales': + return `[Sales] Great question about pricing! Here's what I can tell you about: ${message}`; + default: + return `[General] Thanks for reaching out. Regarding: ${message}`; + } +} diff --git a/examples/typescript/guides/routing/worker.ts b/examples/typescript/guides/routing/worker.ts new file mode 100644 index 0000000000..d528a8f3c1 --- /dev/null +++ b/examples/typescript/guides/routing/worker.ts @@ -0,0 +1,15 @@ +import { hatchet } from '../../hatchet-client'; +import { classifyTask, supportTask, salesTask, defaultTask, routerTask } from './workflow'; + +async function main() { + // > Step 04 Run Worker + const worker = await hatchet.worker('routing-worker', { + workflows: [classifyTask, supportTask, salesTask, defaultTask, routerTask], + slots: 5, + }); + await worker.start(); +} + +if (require.main === module) { + main(); +} diff --git a/examples/typescript/guides/routing/workflow.ts b/examples/typescript/guides/routing/workflow.ts new file mode 100644 index 0000000000..b38b9b242b --- /dev/null +++ b/examples/typescript/guides/routing/workflow.ts @@ -0,0 +1,54 @@ +import { hatchet } from '../../hatchet-client'; +import { mockClassify, mockReply } from './mock-classifier'; + +type MessageInput = { message: string }; + +// > Step 01 Classify Task +const classifyTask = hatchet.durableTask({ + name: 'classify-message', + fn: async (input: MessageInput) => { + return { 
category: mockClassify(input.message) }; + }, +}); + +// > Step 02 Specialist Tasks +const supportTask = hatchet.durableTask({ + name: 'handle-support', + fn: async (input: MessageInput) => { + return { response: mockReply(input.message, 'support'), category: 'support' }; + }, +}); + +const salesTask = hatchet.durableTask({ + name: 'handle-sales', + fn: async (input: MessageInput) => { + return { response: mockReply(input.message, 'sales'), category: 'sales' }; + }, +}); + +const defaultTask = hatchet.durableTask({ + name: 'handle-default', + fn: async (input: MessageInput) => { + return { response: mockReply(input.message, 'other'), category: 'other' }; + }, +}); + +// > Step 03 Router Task +const routerTask = hatchet.durableTask({ + name: 'message-router', + executionTimeout: '2m', + fn: async (input: MessageInput) => { + const { category } = await classifyTask.run(input); + + switch (category) { + case 'support': + return supportTask.run(input); + case 'sales': + return salesTask.run(input); + default: + return defaultTask.run(input); + } + }, +}); + +export { classifyTask, supportTask, salesTask, defaultTask, routerTask }; diff --git a/examples/typescript/guides/scheduled-jobs/trigger.ts b/examples/typescript/guides/scheduled-jobs/trigger.ts new file mode 100644 index 0000000000..c390aa7417 --- /dev/null +++ b/examples/typescript/guides/scheduled-jobs/trigger.ts @@ -0,0 +1,6 @@ +import { hatchet } from '../../hatchet-client'; + +// > Step 02 Schedule One Time +// Schedule a one-time run at a specific time. 
+const runAt = new Date(Date.now() + 60 * 60 * 1000); +await hatchet.scheduled.create('ScheduledWorkflow', { triggerAt: runAt, input: {} }); diff --git a/examples/typescript/guides/scheduled-jobs/worker.ts b/examples/typescript/guides/scheduled-jobs/worker.ts new file mode 100644 index 0000000000..81ab831963 --- /dev/null +++ b/examples/typescript/guides/scheduled-jobs/worker.ts @@ -0,0 +1,14 @@ +import { hatchet } from '../../hatchet-client'; +import { cronWf } from './workflow'; + +async function main() { + // > Step 03 Run Worker + const worker = await hatchet.worker('scheduled-worker', { + workflows: [cronWf], + }); + await worker.start(); +} + +if (require.main === module) { + main(); +} diff --git a/examples/typescript/guides/scheduled-jobs/workflow.ts b/examples/typescript/guides/scheduled-jobs/workflow.ts new file mode 100644 index 0000000000..1696c1ca2a --- /dev/null +++ b/examples/typescript/guides/scheduled-jobs/workflow.ts @@ -0,0 +1,14 @@ +import { hatchet } from '../../hatchet-client'; + +// > Step 01 Define Cron Task +const cronWf = hatchet.workflow({ + name: 'ScheduledWorkflow', + on: { cron: '0 * * * *' }, +}); + +cronWf.task({ + name: 'run-scheduled-job', + fn: async () => ({ status: 'completed', job: 'maintenance' }), +}); + +export { cronWf }; diff --git a/examples/typescript/guides/streaming/client.ts b/examples/typescript/guides/streaming/client.ts new file mode 100644 index 0000000000..d903577985 --- /dev/null +++ b/examples/typescript/guides/streaming/client.ts @@ -0,0 +1,10 @@ +import { streamTask } from './workflow'; + +// > Step 03 Subscribe Client +// Client triggers the task and subscribes to the stream. 
+async function runAndSubscribe() { + const run = await streamTask.run({}); + for await (const chunk of run.stream()) { + console.log(chunk); + } +} diff --git a/examples/typescript/guides/streaming/worker.ts b/examples/typescript/guides/streaming/worker.ts new file mode 100644 index 0000000000..485f8aabe4 --- /dev/null +++ b/examples/typescript/guides/streaming/worker.ts @@ -0,0 +1,14 @@ +import { hatchet } from '../../hatchet-client'; +import { streamTask } from './workflow'; + +async function main() { + // > Step 04 Run Worker + const worker = await hatchet.worker('streaming-worker', { + workflows: [streamTask], + }); + await worker.start(); +} + +if (require.main === module) { + main(); +} diff --git a/examples/typescript/guides/streaming/workflow.ts b/examples/typescript/guides/streaming/workflow.ts new file mode 100644 index 0000000000..067e950d7e --- /dev/null +++ b/examples/typescript/guides/streaming/workflow.ts @@ -0,0 +1,27 @@ +import { ConcurrencyLimitStrategy } from '@hatchet-dev/typescript-sdk/protoc/v1/workflows'; +import { hatchet } from '../../hatchet-client'; + +// > Step 01 Define Streaming Task +export const streamTask = hatchet.task({ + name: 'stream-example', + concurrency: { + expression: "'constant'", + maxRuns: 1, + limitStrategy: ConcurrencyLimitStrategy.CANCEL_IN_PROGRESS, + }, + fn: async (_, ctx) => { + for (let i = 0; i < 5; i++) { + ctx.putStream(`chunk-${i}`); + await new Promise((r) => setTimeout(r, 500)); + } + return { status: 'done' }; + }, +}); + +// > Step 02 Emit Chunks +async function emitChunks(ctx: { putStream: (chunk: string) => void }) { + for (let i = 0; i < 5; i++) { + ctx.putStream(`chunk-${i}`); + await new Promise((r) => setTimeout(r, 500)); + } +} diff --git a/examples/typescript/guides/web-scraping/mock-scraper.ts b/examples/typescript/guides/web-scraping/mock-scraper.ts new file mode 100644 index 0000000000..a75f27fd7e --- /dev/null +++ b/examples/typescript/guides/web-scraping/mock-scraper.ts @@ -0,0 +1,22 @@ 
+export interface ScrapeResult { + url: string; + title: string; + content: string; + scrapedAt: string; +} + +export function mockScrape(url: string): ScrapeResult { + return { + url, + title: `Page: ${url}`, + content: `Mock scraped content from ${url}. In production, use Firecrawl, Browserbase, or Playwright here.`, + scrapedAt: new Date().toISOString(), + }; +} + +export function mockExtract(content: string): Record { + return { + summary: content.slice(0, 80), + wordCount: String(content.split(' ').length), + }; +} diff --git a/examples/typescript/guides/web-scraping/worker.ts b/examples/typescript/guides/web-scraping/worker.ts new file mode 100644 index 0000000000..875b1c0e58 --- /dev/null +++ b/examples/typescript/guides/web-scraping/worker.ts @@ -0,0 +1,22 @@ +import { RateLimitDuration } from '@hatchet-dev/typescript-sdk/protoc/v1/workflows'; +import { hatchet } from '../../hatchet-client'; +import { scrapeTask, processTask, scrapeWorkflow, rateLimitedScrapeTask, SCRAPE_RATE_LIMIT_KEY } from './workflow'; + +async function main() { + // > Step 05 Run Worker + await hatchet.ratelimits.upsert({ + key: SCRAPE_RATE_LIMIT_KEY, + limit: 10, + duration: RateLimitDuration.MINUTE, + }); + + const worker = await hatchet.worker('web-scraping-worker', { + workflows: [scrapeTask, processTask, scrapeWorkflow, rateLimitedScrapeTask], + slots: 5, + }); + await worker.start(); +} + +if (require.main === module) { + main(); +} diff --git a/examples/typescript/guides/web-scraping/workflow.ts b/examples/typescript/guides/web-scraping/workflow.ts new file mode 100644 index 0000000000..f09e22b1fa --- /dev/null +++ b/examples/typescript/guides/web-scraping/workflow.ts @@ -0,0 +1,70 @@ +import { hatchet } from '../../hatchet-client'; +import { mockScrape } from './mock-scraper'; + +type ScrapeInput = { url: string }; + +// > Step 01 Define Scrape Task +const scrapeTask = hatchet.task({ + name: 'scrape-url', + executionTimeout: '2m', + retries: 2, + fn: async (input: ScrapeInput) 
=> { + return mockScrape(input.url); + }, +}); + +// > Step 02 Process Content +const processTask = hatchet.task({ + name: 'process-content', + fn: async (input: { url: string; content: string }) => { + const links = [...input.content.matchAll(/https?:\/\/[^\s<>"']+/g)].map((m) => m[0]); + const summary = input.content.slice(0, 200).trim(); + const wordCount = input.content.split(/\s+/).filter(Boolean).length; + return { summary, wordCount, links }; + }, +}); + +// > Step 03 Cron Workflow +const scrapeWorkflow = hatchet.workflow({ + name: 'WebScrapeWorkflow', + on: { cron: '0 */6 * * *' }, +}); + +scrapeWorkflow.task({ + name: 'scheduled-scrape', + fn: async () => { + const urls = [ + 'https://example.com/pricing', + 'https://example.com/blog', + 'https://example.com/docs', + ]; + + const results = []; + for (const url of urls) { + const scraped = await scrapeTask.run({ url }); + const processed = await processTask.run({ url, content: scraped.content }); + results.push({ url, ...processed }); + } + return { refreshed: results.length, results }; + }, +}); + +// > Step 04 Rate Limited Scrape +const SCRAPE_RATE_LIMIT_KEY = 'scrape-rate-limit'; + +const rateLimitedScrapeTask = hatchet.task({ + name: 'rate-limited-scrape', + executionTimeout: '2m', + retries: 2, + rateLimits: [ + { + staticKey: SCRAPE_RATE_LIMIT_KEY, + units: 1, + }, + ], + fn: async (input: ScrapeInput) => { + return mockScrape(input.url); + }, +}); + +export { scrapeTask, processTask, scrapeWorkflow, rateLimitedScrapeTask, SCRAPE_RATE_LIMIT_KEY }; diff --git a/examples/typescript/guides/webhook-processing/worker.ts b/examples/typescript/guides/webhook-processing/worker.ts new file mode 100644 index 0000000000..7e694d5216 --- /dev/null +++ b/examples/typescript/guides/webhook-processing/worker.ts @@ -0,0 +1,14 @@ +import { hatchet } from '../../hatchet-client'; +import { processWebhook } from './workflow'; + +async function main() { + // > Step 04 Run Worker + const worker = await 
hatchet.worker('webhook-worker', { + workflows: [processWebhook], + }); + await worker.start(); +} + +if (require.main === module) { + main(); +} diff --git a/examples/typescript/guides/webhook-processing/workflow.ts b/examples/typescript/guides/webhook-processing/workflow.ts new file mode 100644 index 0000000000..1ba8b429f7 --- /dev/null +++ b/examples/typescript/guides/webhook-processing/workflow.ts @@ -0,0 +1,29 @@ +import { hatchet } from '../../hatchet-client'; + +type WebhookPayload = { event_id: string; type: string; data: Record }; + +// > Step 01 Define Webhook Task +const processWebhook = hatchet.task({ + name: 'process-webhook', + onEvents: ['webhook:stripe', 'webhook:github'], + fn: async (input) => ({ + processed: input.event_id, + type: input.type, + }), +}); + +// > Step 02 Register Webhook +// Call from your webhook endpoint to trigger the task. +function forwardWebhook(eventKey: string, payload: WebhookPayload) { + hatchet.event.push(eventKey, payload); +} +// forwardWebhook('webhook:stripe', { event_id: 'evt_123', type: 'payment', data: {} }); + +// > Step 03 Process Payload +// Validate event_id for deduplication; process idempotently. +function validateAndProcess(input: WebhookPayload) { + if (!input.event_id) throw new Error('event_id required for deduplication'); + return { processed: input.event_id, type: input.type }; +} + +export { processWebhook }; diff --git a/frontend/app/src/pages/main/v1/events/index.tsx b/frontend/app/src/pages/main/v1/events/index.tsx index 9fad400d1c..d53b18ca90 100644 --- a/frontend/app/src/pages/main/v1/events/index.tsx +++ b/frontend/app/src/pages/main/v1/events/index.tsx @@ -141,7 +141,7 @@ export default function Events() {

No events found

diff --git a/frontend/app/src/pages/main/v1/filters/index.tsx b/frontend/app/src/pages/main/v1/filters/index.tsx index 55a6712a43..8b78ac3704 100644 --- a/frontend/app/src/pages/main/v1/filters/index.tsx +++ b/frontend/app/src/pages/main/v1/filters/index.tsx @@ -97,7 +97,7 @@ export default function Filters() {

No filters found

diff --git a/frontend/app/src/pages/main/v1/rate-limits/index.tsx b/frontend/app/src/pages/main/v1/rate-limits/index.tsx index 2e0bcd7e15..053ddd9ed2 100644 --- a/frontend/app/src/pages/main/v1/rate-limits/index.tsx +++ b/frontend/app/src/pages/main/v1/rate-limits/index.tsx @@ -67,7 +67,7 @@ function RateLimitsTable() {

No rate limits found

diff --git a/frontend/app/src/pages/main/v1/recurring/index.tsx b/frontend/app/src/pages/main/v1/recurring/index.tsx index b72472d005..9f3afdb56e 100644 --- a/frontend/app/src/pages/main/v1/recurring/index.tsx +++ b/frontend/app/src/pages/main/v1/recurring/index.tsx @@ -146,7 +146,7 @@ export default function CronsTable() {

No crons found

diff --git a/frontend/app/src/pages/main/v1/scheduled-runs/index.tsx b/frontend/app/src/pages/main/v1/scheduled-runs/index.tsx index a136a95401..33ad89de18 100644 --- a/frontend/app/src/pages/main/v1/scheduled-runs/index.tsx +++ b/frontend/app/src/pages/main/v1/scheduled-runs/index.tsx @@ -267,7 +267,7 @@ export default function ScheduledRunsTable({

No runs found

diff --git a/frontend/app/src/pages/main/v1/webhooks/index.tsx b/frontend/app/src/pages/main/v1/webhooks/index.tsx index 1e5558632f..41aa4e3fe2 100644 --- a/frontend/app/src/pages/main/v1/webhooks/index.tsx +++ b/frontend/app/src/pages/main/v1/webhooks/index.tsx @@ -124,7 +124,7 @@ export default function Webhooks() {

No webhooks found

diff --git a/frontend/app/src/pages/main/v1/workers/$worker/index.tsx b/frontend/app/src/pages/main/v1/workers/$worker/index.tsx index 5608d28e46..fb49f6e64f 100644 --- a/frontend/app/src/pages/main/v1/workers/$worker/index.tsx +++ b/frontend/app/src/pages/main/v1/workers/$worker/index.tsx @@ -328,7 +328,7 @@ export default function WorkerDetail() { Slots represent concurrent task runs.{' '} @@ -403,7 +403,7 @@ export default function WorkerDetail() { workers.{' '} diff --git a/frontend/app/src/pages/main/v1/workers/index.tsx b/frontend/app/src/pages/main/v1/workers/index.tsx index b1ecdfaef5..be6dca4be9 100644 --- a/frontend/app/src/pages/main/v1/workers/index.tsx +++ b/frontend/app/src/pages/main/v1/workers/index.tsx @@ -88,7 +88,7 @@ export default function Workers() {

No workers found

diff --git a/frontend/app/src/pages/main/v1/workflow-runs-v1/components/runs-table.tsx b/frontend/app/src/pages/main/v1/workflow-runs-v1/components/runs-table.tsx index 3c0a083c6b..0d435c7063 100644 --- a/frontend/app/src/pages/main/v1/workflow-runs-v1/components/runs-table.tsx +++ b/frontend/app/src/pages/main/v1/workflow-runs-v1/components/runs-table.tsx @@ -260,7 +260,7 @@ export function RunsTable({ leftLabel }: { leftLabel?: string }) {

No runs found

diff --git a/frontend/app/src/pages/main/v1/workflows/index.tsx b/frontend/app/src/pages/main/v1/workflows/index.tsx index 654fe15bec..1ca3d16683 100644 --- a/frontend/app/src/pages/main/v1/workflows/index.tsx +++ b/frontend/app/src/pages/main/v1/workflows/index.tsx @@ -47,7 +47,7 @@ export default function WorkflowTable() {

No workflows found

diff --git a/frontend/docs/components/AgentLoopDiagram.tsx b/frontend/docs/components/AgentLoopDiagram.tsx new file mode 100644 index 0000000000..4cdc790730 --- /dev/null +++ b/frontend/docs/components/AgentLoopDiagram.tsx @@ -0,0 +1,306 @@ +import React, { useState, useEffect } from "react"; +import { brand, state, inactive, container } from "./diagram-colors"; + +const PHASES = ["reason", "action", "observation"] as const; +type Phase = (typeof PHASES)[number]; + +const PHASE_CONFIG: Record = { + reason: { label: "Reason", color: brand.blue }, + action: { label: "Action", color: brand.magenta }, + observation: { label: "Observation", color: state.running }, +}; + +/** Small SVG icons rendered inline, no emojis */ +const PhaseIcon: React.FC<{ phase: Phase; active: boolean }> = ({ + phase, + active, +}) => { + const color = active ? PHASE_CONFIG[phase].color : inactive.text; + const size = 18; + + switch (phase) { + case "reason": + return ( + + + + + + ); + case "action": + return ( + + + + ); + case "observation": + return ( + + + + + ); + } +}; + +const AgentLoopDiagram: React.FC = () => { + const [phaseIdx, setPhaseIdx] = useState(0); + const [iteration, setIteration] = useState(1); + + useEffect(() => { + const interval = setInterval(() => { + setPhaseIdx((prev) => { + if (prev === PHASES.length - 1) { + setIteration((i) => (i >= 3 ? 1 : i + 1)); + return 0; + } + return prev + 1; + }); + }, 1400); + return () => clearInterval(interval); + }, []); + + const phase = PHASES[phaseIdx]; + + const svgW = 520; + const svgH = 160; + const nodeY = 70; + const nodeSpacing = 160; + const startX = 100; + + const nodes = PHASES.map((_, i) => ({ + x: startX + i * nodeSpacing, + y: nodeY, + })); + + return ( +
+ + + + + + + + + + + + + + + + + {/* Forward arrows between nodes */} + {nodes.slice(0, -1).map((from, i) => { + const to = nodes[i + 1]; + const isActive = phaseIdx === i; + const arrowMarkerId = isActive ? `arrow-${PHASES[i]}` : "arrow"; + return ( + + ); + })} + + {/* Return arrow: curved path from Observation back to Reason */} + {(() => { + const from = nodes[nodes.length - 1]; + const to = nodes[0]; + const isActive = phaseIdx === PHASES.length - 1; + const curveY = nodeY + 58; + return ( + + ); + })()} + + {/* Loop label on return arrow */} + + iteration {iteration}/3 + + + {/* Phase nodes */} + {PHASES.map((p, i) => { + const pos = nodes[i]; + const config = PHASE_CONFIG[p]; + const isActive = phase === p; + + return ( + + {isActive && ( + + + + )} + + + + + + {config.label} + + + ); + })} + + + {/* Status indicators */} +
+ {PHASES.map((p) => ( +
+ + {PHASE_CONFIG[p].label} +
+ ))} +
+ + ); +}; + +export default AgentLoopDiagram; diff --git a/frontend/docs/components/AgentLoopDiagramWrapper.tsx b/frontend/docs/components/AgentLoopDiagramWrapper.tsx new file mode 100644 index 0000000000..b015e3f552 --- /dev/null +++ b/frontend/docs/components/AgentLoopDiagramWrapper.tsx @@ -0,0 +1,7 @@ +import dynamic from "next/dynamic"; + +const AgentLoopDiagram = dynamic(() => import("./AgentLoopDiagram"), { + ssr: false, +}); + +export default AgentLoopDiagram; diff --git a/frontend/docs/components/BatchProcessingDiagram.tsx b/frontend/docs/components/BatchProcessingDiagram.tsx new file mode 100644 index 0000000000..09cb8a1bff --- /dev/null +++ b/frontend/docs/components/BatchProcessingDiagram.tsx @@ -0,0 +1,312 @@ +import React, { useState, useEffect } from "react"; +import { brand, state, inactive, container, fill } from "./diagram-colors"; + +const ITEMS = Array.from({ length: 8 }, (_, i) => i); + +/** SVG checkmark for completed items */ +const CheckIcon: React.FC<{ x: number; y: number; color: string }> = ({ + x, + y, + color, +}) => ( + + + + + +); + +const CONCURRENCY = 3; + +const BatchProcessingDiagram: React.FC = () => { + const [completedCount, setCompletedCount] = useState(0); + + useEffect(() => { + const interval = setInterval(() => { + setCompletedCount((prev) => { + if (prev >= ITEMS.length) return 0; + return Math.min(prev + CONCURRENCY, ITEMS.length); + }); + }, 800); + return () => clearInterval(interval); + }, []); + + const colors = { + pending: "#1C2B4A", + processing: "#EAB308", + done: "#22C55E", + }; + + return ( +
+ + {/* Trigger box */} + + {/* List icon */} + + + + + + + + + + + + Batch Input + + + {/* Arrow from trigger to items */} + + + + {/* Fan-out items grid (2 rows x 4 cols) */} + {ITEMS.map((item) => { + const col = item % 4; + const row = Math.floor(item / 4); + const x = 120 + col * 52; + const y = 35 + row * 55; + + let status: "pending" | "processing" | "done"; + if (item < completedCount) { + status = "done"; + } else if ( + item < completedCount + CONCURRENCY && + item < ITEMS.length + ) { + status = "processing"; + } else { + status = "pending"; + } + + const color = colors[status]; + + return ( + + {status === "processing" && ( + + + + )} + + {/* Status indicator */} + {status === "done" ? ( + + ) : status === "processing" ? ( + // Spinning indicator + + + + + + + + ) : ( + // File icon for pending + + + + + + + )} + + Item {item + 1} + + + ); + })} + + {/* Arrow from items to results */} + + + + {/* Results box */} + + {/* Bar chart icon */} + + + + + + + + + Results + + + + {/* Progress bar + concurrency label */} +
+
+
+
+
+ + {completedCount}/{ITEMS.length} + +
+ + {CONCURRENCY} in parallel + +
+
+ ); +}; + +export default BatchProcessingDiagram; diff --git a/frontend/docs/components/BatchProcessingDiagramWrapper.tsx b/frontend/docs/components/BatchProcessingDiagramWrapper.tsx new file mode 100644 index 0000000000..6500214f8f --- /dev/null +++ b/frontend/docs/components/BatchProcessingDiagramWrapper.tsx @@ -0,0 +1,8 @@ +import dynamic from "next/dynamic"; + +const BatchProcessingDiagram = dynamic( + () => import("./BatchProcessingDiagram"), + { ssr: false }, +); + +export default BatchProcessingDiagram; diff --git a/frontend/docs/components/BranchingDiagram.tsx b/frontend/docs/components/BranchingDiagram.tsx new file mode 100644 index 0000000000..9b968e536e --- /dev/null +++ b/frontend/docs/components/BranchingDiagram.tsx @@ -0,0 +1,343 @@ +import React, { useState } from "react"; +import { brand, state, fill, inactive, gradient } from "./diagram-colors"; + +const BranchingDiagram: React.FC = () => { + const [isLeft, setIsLeft] = useState(true); + + const nodeWidth = 140; + const nodeHeight = 50; + const nodeRx = 10; + + const activeOpacity = 1; + const dimmedOpacity = 0.2; + + const leftActive = isLeft; + const rightActive = !isLeft; + + return ( +
+ {/* Toggle */} +
+ + value = 72 + + + + value = 23 + +
+ + {/* Diagram */} +
+ + + + + + + + + + + + + + + + + + + {/* Task A — always active */} + + + + Task A + + + + {/* Condition diamond — always active */} + + + + {isLeft ? "> 50 ✓" : "≤ 50 ✓"} + + + + {/* Left Branch */} + + + + Task B + + + {leftActive ? "runs ✓" : "skipped"} + + + + {/* Right Branch */} + + + + Task C + + + {rightActive ? "runs ✓" : "skipped"} + + + + {/* Task D — always active */} + + + + Task D + + + + {/* Edge: Task A -> diamond — always active */} + + + {/* Edge: diamond -> Task B */} + + + {/* Edge: diamond -> Task C */} + + + {/* Edge: Task B -> Task D */} + + + {/* Edge: Task C -> Task D */} + + +
+
+ ); +}; + +export default BranchingDiagram; diff --git a/frontend/docs/components/BranchingDiagramWrapper.tsx b/frontend/docs/components/BranchingDiagramWrapper.tsx new file mode 100644 index 0000000000..3be3775056 --- /dev/null +++ b/frontend/docs/components/BranchingDiagramWrapper.tsx @@ -0,0 +1,7 @@ +import dynamic from "next/dynamic"; + +const BranchingDiagram = dynamic(() => import("./BranchingDiagram"), { + ssr: false, +}); + +export default BranchingDiagram; diff --git a/frontend/docs/components/CycleDiagram.tsx b/frontend/docs/components/CycleDiagram.tsx new file mode 100644 index 0000000000..aee5e10c20 --- /dev/null +++ b/frontend/docs/components/CycleDiagram.tsx @@ -0,0 +1,297 @@ +import React, { useState, useEffect } from "react"; +import { brand, state, fill, inactive, gradient } from "./diagram-colors"; + +const CycleDiagram: React.FC = () => { + const [iteration, setIteration] = useState(0); + const maxIterations = 3; + + useEffect(() => { + const timer = setInterval(() => { + setIteration((prev) => (prev + 1) % (maxIterations + 1)); + }, 2000); + return () => clearInterval(timer); + }, []); + + const nodeWidth = 140; + const nodeHeight = 50; + const nodeRx = 10; + + // Positions + const taskX = 60; + const taskY = 100; + const checkX = 300; + const checkY = 125; + const doneX = 520; + const doneY = 100; + + const isDone = iteration === maxIterations; + + return ( +
+ {/* Iteration counter */} +
+ Iteration: + {[0, 1, 2].map((i) => ( + + {i + 1} + + ))} + + {isDone ? "done!" : "running..."} + +
+ + {/* Diagram */} +
+ + + + + + + + + + + + + + + + + + + {/* Task box */} + + + Task + + + do work + + + {/* Condition diamond */} + + + + {isDone ? "done ✓" : "done?"} + + + {isDone ? "" : "not yet"} + + + + {/* Done box */} + + + Complete + + + return result + + + {/* Edge: Task -> Check */} + + + {/* Edge: Check -> Done (right) */} + + {/* "yes" label on done edge */} + + yes + + + {/* Loop-back edge: Check -> Task (curved below) */} + + {/* "no, loop" label */} + + no → re-run + + +
+
+ ); +}; + +export default CycleDiagram; diff --git a/frontend/docs/components/CycleDiagramWrapper.tsx b/frontend/docs/components/CycleDiagramWrapper.tsx new file mode 100644 index 0000000000..f0cf3c91c8 --- /dev/null +++ b/frontend/docs/components/CycleDiagramWrapper.tsx @@ -0,0 +1,7 @@ +import dynamic from "next/dynamic"; + +const CycleDiagram = dynamic(() => import("./CycleDiagram"), { + ssr: false, +}); + +export default CycleDiagram; diff --git a/frontend/docs/components/DocumentProcessingDiagram.tsx b/frontend/docs/components/DocumentProcessingDiagram.tsx new file mode 100644 index 0000000000..5ed39143a0 --- /dev/null +++ b/frontend/docs/components/DocumentProcessingDiagram.tsx @@ -0,0 +1,291 @@ +import React, { useState, useEffect } from "react"; +import { brand, state, inactive, container } from "./diagram-colors"; + +const STAGES = [ + { id: "ingest", label: "Ingest", color: "#3392FF" }, + { id: "parse", label: "Parse", color: "#BC46DD" }, + { id: "extract", label: "Extract", color: "#EAB308" }, + { id: "validate", label: "Validate", color: "#B8D41C" }, + { id: "output", label: "Output", color: "#22C55E" }, +] as const; + +type StageId = (typeof STAGES)[number]["id"]; + +const StageIcon: React.FC<{ id: StageId; color: string; size?: number }> = ({ + id, + color, + size = 16, +}) => { + const props = { + width: size, + height: size, + viewBox: "0 0 24 24", + fill: "none", + stroke: color, + strokeWidth: "2", + strokeLinecap: "round" as const, + strokeLinejoin: "round" as const, + }; + + switch (id) { + case "ingest": + return ( + + + + + + ); + case "parse": + return ( + + + + + + + + ); + case "extract": + return ( + + + + + + + ); + case "validate": + return ( + + + + + ); + case "output": + return ( + + + + + + ); + } +}; + +const DocumentProcessingDiagram: React.FC = () => { + const [activeStage, setActiveStage] = useState(0); + + useEffect(() => { + const interval = setInterval(() => { + setActiveStage((prev) => (prev + 1) % STAGES.length); + }, 1500); 
+ return () => clearInterval(interval); + }, []); + + const stageWidth = 64; + const gap = 12; + const totalWidth = STAGES.length * stageWidth + (STAGES.length - 1) * gap; + const startX = (480 - totalWidth) / 2; + + return ( +
+ + {/* Connecting arrows */} + {STAGES.slice(0, -1).map((_, i) => { + const fromX = startX + i * (stageWidth + gap) + stageWidth; + const toX = startX + (i + 1) * (stageWidth + gap); + const y = 70; + const isActive = i === activeStage || i + 1 === activeStage; + + return ( + + + + + ); + })} + + {/* Stage boxes */} + {STAGES.map((stage, i) => { + const x = startX + i * (stageWidth + gap); + const y = 40; + const isActive = i === activeStage; + + return ( + + {isActive && ( + + + + )} + + + + + + {stage.label} + + + ); + })} + + {/* Per-file fanout under Parse */} + + + per-file fanout + + + + + {/* Rate-limited under Extract */} + + + rate-limited + + + + + {/* Retry indicator under Validate */} + + + retries on failure + + + + + +
+ {STAGES.map((stage, i) => ( +
+ ))} +
+
+ ); +}; + +export default DocumentProcessingDiagram; diff --git a/frontend/docs/components/DocumentProcessingDiagramWrapper.tsx b/frontend/docs/components/DocumentProcessingDiagramWrapper.tsx new file mode 100644 index 0000000000..7bf57b8473 --- /dev/null +++ b/frontend/docs/components/DocumentProcessingDiagramWrapper.tsx @@ -0,0 +1,10 @@ +import dynamic from "next/dynamic"; + +const DocumentProcessingDiagram = dynamic( + () => import("./DocumentProcessingDiagram"), + { + ssr: false, + }, +); + +export default DocumentProcessingDiagram; diff --git a/frontend/docs/components/DurableWorkflowComparisonDiagram.tsx b/frontend/docs/components/DurableWorkflowComparisonDiagram.tsx new file mode 100644 index 0000000000..dbda0036e7 --- /dev/null +++ b/frontend/docs/components/DurableWorkflowComparisonDiagram.tsx @@ -0,0 +1,767 @@ +import React from "react"; +import { brand, state, fill, inactive, gradient } from "./diagram-colors"; + +const DurableWorkflowComparisonDiagram: React.FC = () => { + const nodeW = 130; + const nodeH = 36; + const smallW = 100; + const smallH = 30; + const rx = 8; + + return ( +
+
+
+ {/* Left: Durable Task Execution */} +
+ + + + + + + + + + + + + + + + + + + + Durable Task + + + shape of work is dynamic + + + {/* Container */} + + + {/* Step 1: Do work */} + + + do_work() + + + line 12 + + + {/* Arrow 1→2 */} + + + {/* Step 2: sleep_for (checkpoint) */} + + + sleep_for(24h) + + + checkpoint + + {/* Save icon */} + + + + + + + {/* Arrow 2→3 */} + + + {/* Step 3: spawn_tasks */} + + + spawn_tasks() + + + fan-out + + + {/* Spawn arrows to child tasks */} + + + + {/* Child task A */} + + + child task 1 + + + {/* Child task B */} + + + child task 2 + + + {/* "..." more children */} + + ... + + + {/* Arrow 3→4 */} + + + {/* Step 4: collect results (checkpoint) */} + + + wait_for_results() + + + checkpoint + + {/* Save icon */} + + + + + + + {/* Arrow 4→5 */} + + + {/* Step 5: process results */} + + + process_results() + + + line 20 + + + {/* Left annotation: call stack bracket */} + + + + + single function + + + {/* Annotation: children run independently */} + + run on any worker + + + {/* Bottom annotation */} + + procedural · checkpoints · N decided at runtime + + +
+ + {/* Right: DAG */} +
+ + + + + + + + + + + + + + + + + + + + DAG Workflow + + + shape of work is known upfront + + + {/* Container */} + + + {/* Task A (top) */} + + + Extract + + + {/* Fan out: A → B and A → C */} + + + + {/* Task B (left) */} + + + Transform A + + + {/* Task C (right) */} + + + Transform B + + + {/* Fan in: B → D and C → D */} + + + + {/* Task D (bottom, merge) */} + + + Load + + + {/* Annotations */} + + start + + + parallel + + + waits for both + + + {/* Bottom annotation */} + + declared graph · fixed shape · each task independent + + +
+
+
+
+ ); +}; + +export default DurableWorkflowComparisonDiagram; diff --git a/frontend/docs/components/DurableWorkflowDiagram.tsx b/frontend/docs/components/DurableWorkflowDiagram.tsx new file mode 100644 index 0000000000..9ed43ca717 --- /dev/null +++ b/frontend/docs/components/DurableWorkflowDiagram.tsx @@ -0,0 +1,383 @@ +import React, { useState, useEffect } from "react"; +import { brand, state, fill, inactive, gradient } from "./diagram-colors"; + +const DurableWorkflowDiagram: React.FC = () => { + // Phases: 0=running, 1=checkpoint, 2=interrupted, 3=resumed, 4=complete + const [phase, setPhase] = useState(0); + + useEffect(() => { + const durations = [1500, 1200, 1800, 1200, 1500]; + const timer = setTimeout(() => { + setPhase((prev) => (prev + 1) % 5); + }, durations[phase]); + return () => clearTimeout(timer); + }, [phase]); + + const nodeW = 120; + const nodeH = 44; + const rx = 8; + + // Timeline positions + const steps = [ + { x: 30, label: "Do Work", sub: "step 1" }, + { x: 175, label: "Checkpoint", sub: "save state" }, + { x: 320, label: "Interrupted", sub: "worker crash" }, + { x: 465, label: "Restore", sub: "new worker" }, + { x: 610, label: "Complete", sub: "step 2" }, + ]; + const y = 90; + + const phaseColors = [ + { + fill: fill.activeNode, + stroke: brand.blue, + text: brand.cyan, + sub: brand.blue, + }, + { + fill: fill.running, + stroke: state.running, + text: state.runningLight, + sub: state.runningDark, + }, + { + fill: fill.failed, + stroke: state.failed, + text: state.failedLight, + sub: state.failed, + }, + { + fill: fill.success, + stroke: state.success, + text: state.successLighter, + sub: state.successLight, + }, + { + fill: fill.success, + stroke: state.success, + text: state.successLighter, + sub: state.successLight, + }, + ]; + + const statusLabels = [ + "running...", + "checkpointing...", + "interrupted!", + "restoring...", + "complete!", + ]; + const statusColors = [ + brand.blue, + state.runningLight, + state.failedLight, + 
state.successLight, + state.successLight, + ]; + + return ( +
+ {/* Status bar */} +
+ Durable task: + {steps.map((s, i) => ( + + ))} + + {statusLabels[phase]} + +
+ + {/* Diagram */} +
+ + + + + + + + + + + + + + + + + + + + + + + {/* Timeline base line */} + + + {/* Progress line */} + = 3 + ? state.success + : brand.blue + } + strokeWidth="2" + style={{ transition: "all 0.6s ease" }} + /> + + {/* Edges between nodes */} + {steps.slice(0, -1).map((s, i) => { + const nextX = steps[i + 1].x; + const isCurrent = i === phase; + + let edgeColor: string = inactive.edge; + if (i < phase) { + edgeColor = + i === 1 ? state.running : i === 2 ? state.success : brand.blue; + } + if (isCurrent) { + edgeColor = phaseColors[phase].stroke; + } + + return ( + + ); + })} + + {/* Nodes */} + {steps.map((s, i) => { + const isActive = i === phase; + const isPast = i < phase; + + let nodeFill: string = fill.inactiveNode; + let stroke: string = inactive.edge; + let textColor: string = inactive.text; + let subColor: string = inactive.stroke; + + if (isActive) { + nodeFill = phaseColors[i].fill; + stroke = phaseColors[i].stroke; + textColor = phaseColors[i].text; + subColor = phaseColors[i].sub; + } else if (isPast) { + nodeFill = phaseColors[i].fill; + stroke = phaseColors[i].stroke; + textColor = phaseColors[i].text; + subColor = phaseColors[i].sub; + } + + return ( + + phase ? 0.3 : 1, + }} + /> + + {s.label} + + + {s.sub} + + + {/* Crash indicator (SVG bolt) */} + {i === 2 && isActive && ( + + + + + + )} + + {/* Checkpoint indicator (SVG save/disk) */} + {i === 1 && (isActive || isPast) && ( + + + + + + + + )} + + {/* Timeline dot */} + + + ); + })} + + {/* Arrow showing "skip replay" from checkpoint to restore */} + {phase >= 3 && ( + = 3 ? 1 : 0, + transition: "opacity 0.5s ease", + }} + > + + + replay from checkpoint + + + )} + +
+
+ ); +}; + +export default DurableWorkflowDiagram; diff --git a/frontend/docs/components/DurableWorkflowDiagramWrapper.tsx b/frontend/docs/components/DurableWorkflowDiagramWrapper.tsx new file mode 100644 index 0000000000..82f43e5398 --- /dev/null +++ b/frontend/docs/components/DurableWorkflowDiagramWrapper.tsx @@ -0,0 +1,10 @@ +import dynamic from "next/dynamic"; + +const DurableWorkflowDiagram = dynamic( + () => import("./DurableWorkflowDiagram"), + { + ssr: false, + }, +); + +export default DurableWorkflowDiagram; diff --git a/frontend/docs/components/EmbeddingIntegrationTabs.mdx b/frontend/docs/components/EmbeddingIntegrationTabs.mdx new file mode 100644 index 0000000000..21b99d3dda --- /dev/null +++ b/frontend/docs/components/EmbeddingIntegrationTabs.mdx @@ -0,0 +1,56 @@ +import { Callout, Tabs } from "nextra/components"; +import { snippets } from "@/lib/generated/snippets"; +import { Snippet } from "@/components/code"; +import PackageManagerInstall from "@/components/PackageManagerInstall"; +import UniversalTabs from "@/components/UniversalTabs"; + + + + +OpenAI's [Embeddings API](https://platform.openai.com/docs/guides/embeddings) converts text into high-dimensional vectors. It supports configurable dimensions and is a popular default for semantic search and RAG pipelines. + + + + + + + + + + + + + + + + + + + + + + +Cohere's [Embed API](https://docs.cohere.com/docs/embeddings) produces embeddings optimized for search and classification with multilingual support. It natively supports `input_type` hints (`"search_document"` vs `"search_query"`) for better retrieval accuracy. + + + + + + + + + + + + + Cohere Go: `go get github.com/cohere-ai/cohere-go`, use `Client.Embed()`. + + + + + Cohere Ruby: `bundle add cohere-ruby`. 
+ + + + + diff --git a/frontend/docs/components/EventDrivenDiagram.tsx b/frontend/docs/components/EventDrivenDiagram.tsx new file mode 100644 index 0000000000..7c43d52b3a --- /dev/null +++ b/frontend/docs/components/EventDrivenDiagram.tsx @@ -0,0 +1,265 @@ +import React, { useState, useEffect } from "react"; +import { brand, state, inactive, container, fill } from "./diagram-colors"; + +type SourceType = "Webhook" | "Cron" | "Event"; + +const SOURCES: { label: SourceType; color: string }[] = [ + { label: "Webhook", color: brand.blue }, + { label: "Cron", color: brand.magenta }, + { label: "Event", color: state.running }, +]; + +const SourceIcon: React.FC<{ + type: SourceType; + color: string; + size?: number; +}> = ({ type, color, size = 14 }) => { + const props = { + width: size, + height: size, + viewBox: "0 0 24 24", + fill: "none", + stroke: color, + strokeWidth: "2", + strokeLinecap: "round" as const, + strokeLinejoin: "round" as const, + }; + + switch (type) { + case "Webhook": + // Link icon + return ( + + + + + ); + case "Cron": + // Clock icon + return ( + + + + + ); + case "Event": + // Signal/broadcast icon + return ( + + + + + + + + + ); + } +}; + +const GearIcon: React.FC<{ color: string; size?: number }> = ({ + color, + size = 14, +}) => ( + + + + +); + +const EventDrivenDiagram: React.FC = () => { + const [activeSource, setActiveSource] = useState(0); + const [pulseVisible, setPulseVisible] = useState(false); + + useEffect(() => { + const interval = setInterval(() => { + setPulseVisible(true); + setTimeout(() => setPulseVisible(false), 800); + setTimeout(() => { + setActiveSource((prev) => (prev + 1) % SOURCES.length); + }, 1000); + }, 2000); + return () => clearInterval(interval); + }, []); + + return ( +
+ + {/* Event sources */} + {SOURCES.map((src, i) => { + const y = 30 + i * 55; + const isActive = i === activeSource; + + return ( + + {/* Source box */} + + {/* Icon via foreignObject */} + + + + + {src.label} + + + {/* Arrow to Hatchet */} + + + ); + })} + + {/* Hatchet engine center */} + + + Hatchet + + + Engine + + + {/* Workers */} + {[0, 1, 2].map((i) => { + const y = 30 + i * 55; + const isActive = pulseVisible && i === activeSource; + + return ( + + {/* Arrow from Hatchet to worker */} + + + {/* Worker box */} + + {/* Gear icon */} + + + + + Worker {i + 1} + + + ); + })} + + + {/* Legend */} +
+ {SOURCES.map((src, i) => ( +
+ + {src.label} +
+ ))} +
+ + ); +}; + +export default EventDrivenDiagram; diff --git a/frontend/docs/components/EventDrivenDiagramWrapper.tsx b/frontend/docs/components/EventDrivenDiagramWrapper.tsx new file mode 100644 index 0000000000..9f83c7bf1e --- /dev/null +++ b/frontend/docs/components/EventDrivenDiagramWrapper.tsx @@ -0,0 +1,7 @@ +import dynamic from "next/dynamic"; + +const EventDrivenDiagram = dynamic(() => import("./EventDrivenDiagram"), { + ssr: false, +}); + +export default EventDrivenDiagram; diff --git a/frontend/docs/components/FanoutDiagram.tsx b/frontend/docs/components/FanoutDiagram.tsx new file mode 100644 index 0000000000..d29fa986a0 --- /dev/null +++ b/frontend/docs/components/FanoutDiagram.tsx @@ -0,0 +1,230 @@ +import React from "react"; +import { brand, state, fill, inactive, gradient } from "./diagram-colors"; + +const FanoutDiagram: React.FC = () => { + const childYPositions = [40, 110, 180, 270]; + const childLabels = ["Child 1", "Child 2", "Child 3", "Child N"]; + + return ( +
+
+ + + + + + + + + + + + + + + + + + + {/* Parent Task Box */} + + + Parent Task + + + spawn(input) + + + {/* Fan-out lines from parent to children */} + {childYPositions.map((cy, i) => ( + + ))} + + {/* Child boxes */} + {childLabels.slice(0, 3).map((label, i) => ( + + + + {label} + + + ))} + + {/* Ellipsis between Child 3 and Child N */} + + ... + + + {/* Child N */} + + + Child N + + + {/* Converge lines from children to results */} + {childYPositions.map((cy, i) => ( + + ))} + + {/* Results Box */} + + + Collect Results + + + await all children + + +
+
+ ); +}; + +export default FanoutDiagram; diff --git a/frontend/docs/components/HumanInLoopDiagram.tsx b/frontend/docs/components/HumanInLoopDiagram.tsx new file mode 100644 index 0000000000..49ced71ec7 --- /dev/null +++ b/frontend/docs/components/HumanInLoopDiagram.tsx @@ -0,0 +1,402 @@ +import React, { useState, useEffect } from "react"; +import { brand, state, fill, inactive, gradient } from "./diagram-colors"; + +const HumanInLoopDiagram: React.FC = () => { + const [phase, setPhase] = useState(0); // 0=agent, 1=waiting, 2=human, 3=resume, 4=complete + + useEffect(() => { + const durations = [1200, 2200, 1000, 1200, 1200]; + const timer = setTimeout(() => { + setPhase((prev) => (prev + 1) % 5); + }, durations[phase]); + return () => clearTimeout(timer); + }, [phase]); + + const nodeW = 88; + const nodeH = 40; + const rx = 10; + const gap = 28; + + const paddingX = 32; + const agentX = paddingX; + const waitX = agentX + nodeW + gap; + const waitW = 140; + const resumeX = waitX + waitW + gap; + const completeX = resumeX + nodeW + gap; + + const flowY = 115; + const humanH = 48; + const humanW = 82; + const humanY = 16; + const humanX = waitX + waitW / 2 - humanW / 2; + const totalW = completeX + nodeW + paddingX; + + const isAgent = phase === 0; + const isWaiting = phase === 1; + const isHuman = phase === 2; + const isResumed = phase === 3; + const isDone = phase === 4; + + const waitActive = isWaiting || isHuman; + + return ( +
+
+ {isDone + ? "complete" + : isResumed + ? "resuming..." + : isHuman + ? "human approves" + : isWaiting + ? "slot freed — waiting" + : "agent proposes"} +
+ +
+ + + + + + + + + + + + + + + + + + + {/* Agent box */} + + + Agent + + + proposes + + + {/* Edge: Agent -> Wait */} + + + {/* Wait for Approval box (paused block) */} + + + Wait for Approval + + + {isWaiting ? "slot freed" : "WaitForEvent"} + + {isWaiting && ( + + + + + )} + + {/* Human above the paused block */} + + {/* Dashed line: Human bottom -> top of Wait block */} + + {/* Human box */} + + {/* Person icon */} + + + + + + Human + + + approves + + + + {/* Edge: Wait -> Resume */} + + + {/* Resume box */} + + + Resume + + + event received + + + {/* Edge: Resume -> Complete */} + + + {/* Complete box */} + + + Complete + + + continue + + +
+
+ ); +}; + +export default HumanInLoopDiagram; diff --git a/frontend/docs/components/HumanInLoopDiagramWrapper.tsx b/frontend/docs/components/HumanInLoopDiagramWrapper.tsx new file mode 100644 index 0000000000..17552dc41b --- /dev/null +++ b/frontend/docs/components/HumanInLoopDiagramWrapper.tsx @@ -0,0 +1,7 @@ +import dynamic from "next/dynamic"; + +const HumanInLoopDiagram = dynamic(() => import("./HumanInLoopDiagram"), { + ssr: false, +}); + +export default HumanInLoopDiagram; diff --git a/frontend/docs/components/InstallCommand.tsx b/frontend/docs/components/InstallCommand.tsx index 3f0d572f5a..b085f62f23 100644 --- a/frontend/docs/components/InstallCommand.tsx +++ b/frontend/docs/components/InstallCommand.tsx @@ -49,7 +49,10 @@ export default function InstallCommand({ ); } else if (selectedLanguage === "Python") { return withDevDependencies ? ( - + + + + ) : ( - +
Create a virtual environment
+ +
Create a virtual environment
+ +
Initialize and install the Hatchet SDK
+ +
); } else if (selectedLanguage === "Go") { diff --git a/frontend/docs/components/LLMIntegrationTabs.mdx b/frontend/docs/components/LLMIntegrationTabs.mdx new file mode 100644 index 0000000000..c912f354a8 --- /dev/null +++ b/frontend/docs/components/LLMIntegrationTabs.mdx @@ -0,0 +1,133 @@ +import { Callout, Tabs } from "nextra/components"; +import { snippets } from "@/lib/generated/snippets"; +import { Snippet } from "@/components/code"; +import PackageManagerInstall from "@/components/PackageManagerInstall"; +import UniversalTabs from "@/components/UniversalTabs"; + + + + +OpenAI's [Chat Completions API](https://platform.openai.com/docs/guides/text-generation) provides access to GPT models for text generation, function calling, and structured outputs. It's the most widely adopted LLM API and supports streaming, tool use, and JSON mode. + + + + + + + + + + + + + + + + + + + + + + +Anthropic's [Messages API](https://docs.anthropic.com/en/docs/build-with-claude/text-generation) powers the Claude family of models, including Claude Sonnet and Claude Haiku. Claude excels at long-context reasoning, careful instruction following, and tool use with extended thinking support. + + + + + + + + + + + + + Anthropic Go: `go get github.com/anthropics/anthropic-sdk-go`, wire `messages.Create()` into your complete function. + + + + + Anthropic Ruby: `bundle add anthropic`, wire the client into your complete function. + + + + + + +[Groq](https://console.groq.com/docs/overview) provides ultra-fast inference for open-source models like Llama and Mixtral using custom LPU hardware. Its OpenAI-compatible API makes it a drop-in replacement when you need low latency. + + + + + + + + + + + + + Groq: use `net/http` against `api.groq.com/openai/v1/chat/completions`. See [Groq docs](https://console.groq.com/docs). + + + + + Groq Ruby: `bundle add groq` or use HTTP client. See [Groq docs](https://console.groq.com/docs). 
+ + + + + + +The [Vercel AI SDK](https://sdk.vercel.ai/docs) is a TypeScript toolkit that provides a unified interface across providers (OpenAI, Anthropic, Google, and more). It includes helpers for streaming, tool calls, and structured object generation via `generateText` and `streamText`. + + + + + Vercel AI SDK is JavaScript/TypeScript only. Use OpenAI, Anthropic, or Groq SDK directly. + + + + + + + + + Vercel AI SDK is JavaScript/TypeScript only. + + + + + Vercel AI SDK is JavaScript/TypeScript only. + + + + + + +[Ollama](https://ollama.com/) runs open-source models locally with no API key required. It supports Llama, Mistral, Gemma, and others through a simple REST API on `localhost:11434`. Ideal for development, air-gapped environments, or when you want full control over your model. + + + + + + + + + Use `fetch` to `http://localhost:11434/api/chat`. See [Ollama API](https://github.com/ollama/ollama/blob/main/docs/api.md). + + + + + Use `net/http` to `http://localhost:11434/api/chat`. See [Ollama API](https://github.com/ollama/ollama/blob/main/docs/api.md). + + + + + Use `Net::HTTP` to `http://localhost:11434/api/chat`. See [Ollama API](https://github.com/ollama/ollama/blob/main/docs/api.md). + + + + + diff --git a/frontend/docs/components/LLMIntegrationTabs.tsx b/frontend/docs/components/LLMIntegrationTabs.tsx new file mode 100644 index 0000000000..a542b511ea --- /dev/null +++ b/frontend/docs/components/LLMIntegrationTabs.tsx @@ -0,0 +1,305 @@ +import { Callout, Tabs } from "nextra/components"; +import { snippets } from "@/lib/generated/snippets"; +import { Snippet } from "@/components/code"; +import PackageManagerInstall from "@/components/PackageManagerInstall"; +import UniversalTabs from "@/components/UniversalTabs"; + +/** Nested tabs: Provider → Language. Wire into get_llm_service() / LLMService.generate(). */ +export function LLMIntegrationTabs() { + return ( + + +

+ OpenAI's{" "} + + Chat Completions API + {" "} + provides access to GPT models for text generation, function calling, + and structured outputs. It's the most widely adopted LLM API and + supports streaming, tool use, and JSON mode. +

+ + + + + + + + + + + + + + + + + + +
+ +

+ Anthropic's{" "} + + Messages API + {" "} + powers the Claude family of models, including{" "} + claude-sonnet and claude-haiku. Claude + excels at long-context reasoning, careful instruction following, and + tool use with extended thinking support. +

+ + + + + + + + + + + + Anthropic Go:{" "} + go get github.com/anthropics/anthropic-sdk-go — wire{" "} + messages.Create() into your complete function. + + + + + Anthropic Ruby: bundle add anthropic — wire the + client into your complete function. + + + +
+ +

+ + Groq + {" "} + provides ultra-fast inference for open-source models like Llama and + Mixtral using custom LPU hardware. Its OpenAI-compatible API makes it + a drop-in replacement when you need low latency. +

+ + + + + + + + + + + + Groq: use net/http against{" "} + api.groq.com/openai/v1/chat/completions. See{" "} + + Groq docs + + . + + + + + Groq Ruby: bundle add groq or use HTTP client. See{" "} + + Groq docs + + . + + + +
+ +

+ The{" "} + + Vercel AI SDK + {" "} + is a TypeScript toolkit that provides a unified interface across + providers (OpenAI, Anthropic, Google, and more). It includes helpers + for streaming, tool calls, and structured object generation via{" "} + generateText and streamText. +

+ + + + Vercel AI SDK is JavaScript/TypeScript only. Use OpenAI, + Anthropic, or Groq SDK directly. + + + + + + + + + Vercel AI SDK is JavaScript/TypeScript only. + + + + + Vercel AI SDK is JavaScript/TypeScript only. + + + +
+ +

+ + Ollama + {" "} + runs open-source models locally — no API key required. It supports + Llama, Mistral, Gemma, and others through a simple REST API on{" "} + localhost:11434. Ideal for development, air-gapped + environments, or when you want full control over your model. +

+ + Prerequisites — install Ollama, start the server, and + pull a model before running the examples below: +
+            {`# Install (macOS / Linux)
+curl -fsSL https://ollama.com/install.sh | sh
+
+# Start the server (runs on localhost:11434)
+ollama serve
+
+# Pull a model (in a separate terminal)
+ollama pull llama3.2`}
+          
+
+ + + + + + + + Use fetch to{" "} + http://localhost:11434/api/chat. See{" "} + + Ollama API + + . + + + + + Use net/http to{" "} + http://localhost:11434/api/chat. See{" "} + + Ollama API + + . + + + + + Use Net::HTTP to{" "} + http://localhost:11434/api/chat. See{" "} + + Ollama API + + . + + + +
+
+ ); +} + +export default LLMIntegrationTabs; diff --git a/frontend/docs/components/LanguageSelectorButton.tsx b/frontend/docs/components/LanguageSelectorButton.tsx new file mode 100644 index 0000000000..9e95ba5ac7 --- /dev/null +++ b/frontend/docs/components/LanguageSelectorButton.tsx @@ -0,0 +1,244 @@ +import React, { useState, useEffect } from "react"; +import { useRouter } from "next/router"; +import { useLanguage } from "../context/LanguageContext"; +import { + DOC_LANGUAGES, + DEFAULT_LANGUAGE, + LOGO_PATHS, + getPackageManagers, + getFixedPackageManagerMessage, + type DocLanguage, +} from "@/lib/docs-languages"; +import { ChevronDownIcon } from "@radix-ui/react-icons"; +import { + Dialog, + DialogClose, + DialogContent, + DialogDescription, + DialogFooter, + DialogHeader, + DialogTitle, + DialogTrigger, +} from "@/components/ui/dialog"; +import { Button } from "@/components/ui/button"; + +const CALLOUT_DISMISSED_KEY = "docsLanguageCalloutDismissed"; + +function ThemedIcon({ src, size = 12 }: { src: string; size?: number }) { + return ( + + ); +} + +function resolveSelected(lang: string): DocLanguage { + const exact = DOC_LANGUAGES.find((l) => l === lang); + if (exact) return exact; + const lower = lang.toLowerCase(); + const match = DOC_LANGUAGES.find((l) => l.toLowerCase() === lower); + return match ?? DEFAULT_LANGUAGE; +} + +function resolvePackageManager(lang: DocLanguage, current: string): string { + const opts = getPackageManagers(lang); + if (!opts) return ""; + const exact = opts.find((p) => p === current); + if (exact) return exact; + const lower = current.toLowerCase(); + const match = opts.find((p) => p.toLowerCase() === lower); + return match ?? 
opts[0]; +} + +function LanguageModalContent() { + const router = useRouter(); + const basePath = router.basePath || ""; + const { + selectedLanguage, + setSelectedLanguage, + getSelectedOption, + setSelectedOption, + } = useLanguage(); + const current = resolveSelected(selectedLanguage); + const pmOptions = getPackageManagers(current); + const currentPm = pmOptions + ? resolvePackageManager( + current, + getSelectedOption("packageManager") || pmOptions[0], + ) + : null; + + return ( +
+
+
+ Language +
+
+ {DOC_LANGUAGES.map((lang) => { + const filename = LOGO_PATHS[lang]; + const isSelected = current === lang; + return ( + + ); + })} +
+
+
+
+ Package manager +
+ {pmOptions ? ( +
+ {pmOptions.map((pm) => { + const isSelected = currentPm === pm; + return ( + + ); + })} +
+ ) : ( +

+ {getFixedPackageManagerMessage(current) ?? ""} +

+ )} +
+
+ ); +} + +export function LanguageSelectorButton() { + const router = useRouter(); + const basePath = router.basePath || ""; + const { selectedLanguage } = useLanguage(); + const current = resolveSelected(selectedLanguage); + const [showCallout, setShowCallout] = useState(false); + + useEffect(() => { + if (typeof window === "undefined") return; + try { + const dismissed = localStorage.getItem(CALLOUT_DISMISSED_KEY) === "true"; + const hasChosenLanguage = + localStorage.getItem("uiOptions") !== null || + localStorage.getItem("selectedLanguage") !== null; + setShowCallout(!dismissed && !hasChosenLanguage); + } catch { + setShowCallout(true); + } + }, []); + + const dismissCallout = () => { + setShowCallout(false); + try { + localStorage.setItem(CALLOUT_DISMISSED_KEY, "true"); + } catch { + /* noop */ + } + }; + + return ( +
+ { + if (open) dismissCallout(); + }} + > + + + + + + Docs preferences + + Customize your documentation experience by selecting your your + stack for code examples. + + + + + + + + + + + + {showCallout && ( +
+
+
+ + 👋 Select a language so examples match your stack + +
+
+ )} +
+ ); +} diff --git a/frontend/docs/components/LanguageSwitcher.tsx b/frontend/docs/components/LanguageSwitcher.tsx new file mode 100644 index 0000000000..a9aa3ac0f0 --- /dev/null +++ b/frontend/docs/components/LanguageSwitcher.tsx @@ -0,0 +1,91 @@ +import React from "react"; +import { useRouter } from "next/router"; +import { useLanguage } from "../context/LanguageContext"; +import { + DOC_LANGUAGES, + DEFAULT_LANGUAGE, + LOGO_PATHS, +} from "@/lib/docs-languages"; + +function ThemedIcon({ src }: { src: string }) { + return ( + + ); +} + +function resolveSelected(lang: string) { + const exact = DOC_LANGUAGES.find((l) => l === lang); + if (exact) return exact; + const lower = lang.toLowerCase(); + const match = DOC_LANGUAGES.find((l) => l.toLowerCase() === lower); + return match ?? DEFAULT_LANGUAGE; +} + +export default function LanguageSwitcher() { + const router = useRouter(); + const basePath = router.basePath || ""; + const { selectedLanguage, setSelectedLanguage } = useLanguage(); + const current = resolveSelected(selectedLanguage); + + return ( +
+

+ Customize your docs experience — choose your preferred language for code + examples: +

+
+ {DOC_LANGUAGES.map((lang) => { + const filename = LOGO_PATHS[lang]; + const isSelected = current === lang; + return ( + + ); + })} +
+
+ ); +} diff --git a/frontend/docs/components/LongWaitDiagram.tsx b/frontend/docs/components/LongWaitDiagram.tsx new file mode 100644 index 0000000000..2bcbf4b1a8 --- /dev/null +++ b/frontend/docs/components/LongWaitDiagram.tsx @@ -0,0 +1,334 @@ +import React, { useState, useEffect } from "react"; +import { brand, state, fill, inactive, gradient } from "./diagram-colors"; + +const LongWaitDiagram: React.FC = () => { + const [phase, setPhase] = useState(0); // 0=running, 1=waiting, 2=resumed, 3=complete + + useEffect(() => { + const durations = [1200, 2400, 1200, 1200]; + const timer = setTimeout(() => { + setPhase((prev) => (prev + 1) % 4); + }, durations[phase]); + return () => clearTimeout(timer); + }, [phase]); + + const nodeW = 130; + const nodeH = 50; + const rx = 10; + + // Positions + const taskX = 30; + const taskY = 80; + const waitX = 220; + const waitY = 80; + const resumeX = 440; + const resumeY = 80; + const completeX = 600; + const completeY = 80; + + const isWaiting = phase === 1; + const isResumed = phase === 2; + const isDone = phase === 3; + + const waitLabel = "Sleep 24h"; + const waitSublabel = "durable pause"; + const triggerLabel = "time elapsed"; + + return ( +
+ {/* Status */} +
+ + {isDone + ? "complete!" + : isResumed + ? "resuming..." + : isWaiting + ? "waiting..." + : "running..."} + +
+ + {/* Diagram */} +
+ + + + + + + + + + + + + + + + + + + {/* Task box */} + + + Task Runs + + + do work + + + {/* Edge: Task -> Wait */} + + + {/* Wait box - larger, distinctive */} + + {/* Pause icon */} + {isWaiting && ( + + + + + )} + + {waitLabel} + + {!isWaiting && ( + + {waitSublabel} + + )} + + {/* Trigger label below wait box */} + + {isResumed || isDone ? triggerLabel : ""} + + + {/* Edge: Wait -> Resume */} + + + {/* Resume box */} + + + Resume + + + continue work + + + {/* Edge: Resume -> Complete */} + + + {/* Complete box */} + + + Complete + + + return result + + +
+
+ ); +}; + +export default LongWaitDiagram; diff --git a/frontend/docs/components/LongWaitDiagramWrapper.tsx b/frontend/docs/components/LongWaitDiagramWrapper.tsx new file mode 100644 index 0000000000..476dd7f715 --- /dev/null +++ b/frontend/docs/components/LongWaitDiagramWrapper.tsx @@ -0,0 +1,7 @@ +import dynamic from "next/dynamic"; + +const LongWaitDiagram = dynamic(() => import("./LongWaitDiagram"), { + ssr: false, +}); + +export default LongWaitDiagram; diff --git a/frontend/docs/components/Mermaid.tsx b/frontend/docs/components/Mermaid.tsx new file mode 100644 index 0000000000..cdbec81863 --- /dev/null +++ b/frontend/docs/components/Mermaid.tsx @@ -0,0 +1,109 @@ +"use client"; +import { jsx } from "react/jsx-runtime"; +import { useEffect, useId, useRef, useState } from "react"; + +function useIsVisible(ref: React.RefObject) { + const [isIntersecting, setIsIntersecting] = useState(false); + useEffect(() => { + if (!ref.current) return; + const observer = new IntersectionObserver(([entry]) => { + if (entry.isIntersecting) { + observer.disconnect(); + setIsIntersecting(true); + } + }); + observer.observe(ref.current); + return () => { + observer.disconnect(); + }; + }, [ref]); + return isIntersecting; +} + +const BRAND_THEME = { + primaryColor: "#0F1A3A", + primaryTextColor: "#B8D9FF", + primaryBorderColor: "#3392FF", + lineColor: "#3392FF", + secondaryColor: "#1A1035", + secondaryTextColor: "#D585EF", + secondaryBorderColor: "#BC46DD", + tertiaryColor: "#0D2A15", + tertiaryTextColor: "#86EFAC", + tertiaryBorderColor: "#22C55E", + background: "#02081D", + mainBkg: "#0F1A3A", + nodeBorder: "#3392FF", + clusterBkg: "#0A1029", + clusterBorder: "#1C2B4A", + titleColor: "#B8D9FF", + edgeLabelBackground: "#02081D", + noteBkgColor: "#162035", + noteTextColor: "#B8D9FF", + noteBorderColor: "#3392FF", + actorBorder: "#3392FF", + actorBkg: "#0F1A3A", + actorTextColor: "#B8D9FF", + actorLineColor: "#3392FF", + signalColor: "#B8D9FF", + signalTextColor: "#B8D9FF", + 
labelBoxBkgColor: "#0F1A3A", + labelBoxBorderColor: "#3392FF", + labelTextColor: "#B8D9FF", + loopTextColor: "#B8D9FF", + activationBorderColor: "#3392FF", + activationBkgColor: "#162947", + sequenceNumberColor: "#B8D9FF", +}; + +function Mermaid({ chart }: { chart: string }) { + const id = useId(); + const [svg, setSvg] = useState(""); + const containerRef = useRef(null); + const isVisible = useIsVisible(containerRef); + + useEffect(() => { + if (!isVisible) return; + + const htmlElement = document.documentElement; + const observer = new MutationObserver(renderChart); + observer.observe(htmlElement, { attributes: true }); + renderChart(); + return () => { + observer.disconnect(); + }; + + async function renderChart() { + const isDarkTheme = + htmlElement.classList.contains("dark") || + htmlElement.attributes.getNamedItem("data-theme")?.value === "dark"; + + const { default: mermaid } = await import("mermaid"); + try { + mermaid.initialize({ + startOnLoad: false, + securityLevel: "loose", + fontFamily: "inherit", + themeCSS: "margin: 1.5rem auto 0;", + theme: isDarkTheme ? "base" : "default", + ...(isDarkTheme ? { themeVariables: BRAND_THEME } : {}), + }); + const { svg: rendered } = await mermaid.render( + id.replaceAll(":", ""), + chart.replaceAll("\\n", "\n"), + containerRef.current ?? 
undefined, + ); + setSvg(rendered); + } catch (error) { + console.error("Error while rendering mermaid", error); + } + } + }, [chart, id, isVisible]); + + return jsx("div", { + ref: containerRef, + dangerouslySetInnerHTML: { __html: svg }, + }); +} + +export { Mermaid }; diff --git a/frontend/docs/components/OCRIntegrationTabs.mdx b/frontend/docs/components/OCRIntegrationTabs.mdx new file mode 100644 index 0000000000..311cd966c4 --- /dev/null +++ b/frontend/docs/components/OCRIntegrationTabs.mdx @@ -0,0 +1,104 @@ +import { Callout, Tabs } from "nextra/components"; +import { snippets } from "@/lib/generated/snippets"; +import { Snippet } from "@/components/code"; +import PackageManagerInstall from "@/components/PackageManagerInstall"; +import UniversalTabs from "@/components/UniversalTabs"; + + + + +[Tesseract](https://github.com/tesseract-ocr/tesseract) is an open-source OCR engine maintained by Google. It supports 100+ languages and runs entirely on your own infrastructure with no API key or cloud service required. A solid default for straightforward text extraction from images and scanned documents. + + + + + + + + + + + + + + + + + + + + + + +[Unstructured](https://docs.unstructured.io/) is a Python library for parsing and chunking complex documents (PDFs, Word files, HTML, and more). It combines layout detection with OCR to extract structured elements like titles, tables, and narrative text, ready for downstream LLM or RAG pipelines. + + + + + + + + + Unstructured is Python-only. Use Tesseract or Reducto API for Node. + + + + Unstructured is Python-only. + + + Unstructured is Python-only. + + + + + +[Reducto](https://docs.reducto.ai/) is a cloud API purpose-built for high-fidelity document extraction. It handles complex layouts, tables, and forms with strong accuracy out of the box, no OCR tuning needed. Particularly useful for invoices, contracts, and structured financial documents. 
+ + + + + + + + + Reducto: use `fetch` to the [platform.reducto.ai](https://docs.reducto.ai/) API. + + + + + Reducto: use `net/http` to platform.reducto.ai. See [docs](https://docs.reducto.ai/). + + + + + Reducto: use HTTP client. See [docs](https://docs.reducto.ai/). + + + + + + +[Google Cloud Vision](https://cloud.google.com/vision/docs) provides production-grade OCR through a managed API. It supports handwriting recognition, document text detection, and PDF/TIFF batch processing with strong multilingual accuracy. Requires a Google Cloud project and service account credentials. + + + + + + + + + + + + + Google Vision: `go get cloud.google.com/go/vision/apiv1`. + + + + + Google Vision: `bundle add google-cloud-vision`. + + + + + diff --git a/frontend/docs/components/PackageManagerInstall.tsx b/frontend/docs/components/PackageManagerInstall.tsx new file mode 100644 index 0000000000..74eef81314 --- /dev/null +++ b/frontend/docs/components/PackageManagerInstall.tsx @@ -0,0 +1,79 @@ +import { useLanguage } from "@/context/LanguageContext"; +import { getPackageManagers } from "@/lib/docs-languages"; +import { Tabs } from "nextra/components"; +import UniversalTabs from "@/components/UniversalTabs"; +import { CodeBlock } from "@/components/code/CodeBlock"; + +export interface PackageManagerInstallProps { + /** Package spec per language. Omit a language to not show install (e.g. when using Callout instead). */ + packages: { + python?: string; + typescript?: string; + go?: string; + ruby?: string; + }; + /** Optional key to scope package manager preference (default: packageManager) */ + optionKey?: string; +} + +/** Renders install commands with language-specific package manager tabs (pip/poetry/uv, npm/pnpm/yarn). */ +export default function PackageManagerInstall({ + packages, + optionKey = "packageManager", +}: PackageManagerInstallProps) { + const { selectedLanguage } = useLanguage(); + + const normalizedLang = + selectedLanguage === "TypeScript" + ? 
"typescript" + : selectedLanguage.toLowerCase(); + + const pkg = packages[normalizedLang as keyof typeof packages]; + if (!pkg) return null; + + if (normalizedLang === "python") { + const options = getPackageManagers("Python")!; + return ( + + + + + + + + + + + + ); + } + + if (normalizedLang === "typescript") { + const options = getPackageManagers("Typescript")!; + return ( + + + + + + + + + + + + ); + } + + if (normalizedLang === "go") { + return ; + } + + if (normalizedLang === "ruby") { + return ( + + ); + } + + return null; +} diff --git a/frontend/docs/components/PatternComparison.tsx b/frontend/docs/components/PatternComparison.tsx new file mode 100644 index 0000000000..353f279426 --- /dev/null +++ b/frontend/docs/components/PatternComparison.tsx @@ -0,0 +1,192 @@ +import React from "react"; + +interface ComparisonRow { + label: string; + workflow: string; + durable: string; +} + +interface PatternComparisonProps { + rows: ComparisonRow[]; + recommendation?: "workflow" | "durable" | "both"; + recommendationText?: string; +} + +const PatternComparison: React.FC = ({ + rows, + recommendation = "workflow", + recommendationText, +}) => { + return ( +
+
+ {/* Workflows column */} +
+
+
+ + + + + + + +
+ + Workflows (DAGs) + + {recommendation === "workflow" && ( + + recommended + + )} +
+
+ {rows.map((row, i) => ( +
+
+ {row.label} +
+
{row.workflow}
+
+ ))} +
+
+ + {/* Durable column */} +
+
+
+ + + + + + +
+ + Durable Workflows + + {recommendation === "durable" && ( + + recommended + + )} +
+
+ {rows.map((row, i) => ( +
+
+ {row.label} +
+
{row.durable}
+
+ ))} +
+
+
+ + {/* Recommendation footer */} + {recommendationText && ( +
+ {recommendationText} +
+ )} +
+ ); +}; + +export default PatternComparison; diff --git a/frontend/docs/components/PipelineDiagram.tsx b/frontend/docs/components/PipelineDiagram.tsx new file mode 100644 index 0000000000..fb8a7716be --- /dev/null +++ b/frontend/docs/components/PipelineDiagram.tsx @@ -0,0 +1,156 @@ +import React from "react"; +import { brand, inactive, gradient } from "./diagram-colors"; + +const PipelineDiagram: React.FC = () => { + // Layout: + // Task A (standalone) + // Task B -> Task D -> Task E (pipeline) + // Task C (standalone) + const nodes = [ + { id: "a", label: "Task A", x: 30, y: 40 }, + { id: "b", label: "Task B", x: 30, y: 130 }, + { id: "c", label: "Task C", x: 30, y: 220 }, + { id: "d", label: "Task D", x: 300, y: 130 }, + { id: "e", label: "Task E", x: 560, y: 130 }, + ]; + + const nodeWidth = 140; + const nodeHeight = 50; + const nodeRx = 10; + + const edges = [ + { from: "b", to: "d" }, + { from: "d", to: "e" }, + ]; + + const getNode = (id: string) => nodes.find((n) => n.id === id)!; + + return ( +
+
+ + + + + + + + + + + + + + + {/* Nodes */} + {nodes.map((node) => ( + + + + {node.label} + + + ))} + + {/* Edges (rendered after nodes so lines appear on top) */} + {edges.map(({ from, to }, i) => { + const f = getNode(from); + const t = getNode(to); + const startX = f.x + nodeWidth + 2; + const startY = f.y + nodeHeight / 2; + const endX = t.x - 2; + const endY = t.y + nodeHeight / 2; + const midX = (startX + endX) / 2; + + return ( + + ); + })} + + {/* Parallel label and dashed box around A, B, C */} + + parallel + + + +
+
+ ); +}; + +export default PipelineDiagram; diff --git a/frontend/docs/components/RAGPipelineDiagram.tsx b/frontend/docs/components/RAGPipelineDiagram.tsx new file mode 100644 index 0000000000..e0274fbe07 --- /dev/null +++ b/frontend/docs/components/RAGPipelineDiagram.tsx @@ -0,0 +1,301 @@ +import React, { useState, useEffect } from "react"; +import { brand, state, inactive, container } from "./diagram-colors"; + +const STAGES = [ + { id: "ingest", label: "Ingest", color: "#3392FF" }, + { id: "chunk", label: "Chunk", color: "#BC46DD" }, + { id: "embed", label: "Embed", color: "#EAB308" }, + { id: "index", label: "Index", color: "#22C55E" }, + { id: "query", label: "Query", color: "#B8D41C" }, +] as const; + +type StageId = (typeof STAGES)[number]["id"]; + +const StageIcon: React.FC<{ id: StageId; color: string; size?: number }> = ({ + id, + color, + size = 16, +}) => { + const props = { + width: size, + height: size, + viewBox: "0 0 24 24", + fill: "none", + stroke: color, + strokeWidth: "2", + strokeLinecap: "round" as const, + strokeLinejoin: "round" as const, + }; + + switch (id) { + case "ingest": + // Download/import arrow + return ( + + + + + + ); + case "chunk": + // Scissors + return ( + + + + + + + + ); + case "embed": + // Grid/vector + return ( + + + + + + + ); + case "index": + // Database + return ( + + + + + + ); + case "query": + // Search + return ( + + + + + ); + } +}; + +const RAGPipelineDiagram: React.FC = () => { + const [activeStage, setActiveStage] = useState(0); + + useEffect(() => { + const interval = setInterval(() => { + setActiveStage((prev) => (prev + 1) % STAGES.length); + }, 1500); + return () => clearInterval(interval); + }, []); + + const stageWidth = 72; + const gap = 16; + const totalWidth = STAGES.length * stageWidth + (STAGES.length - 1) * gap; + const startX = (440 - totalWidth) / 2; + + return ( +
+ + {/* Connecting arrows */} + {STAGES.slice(0, -1).map((_, i) => { + const fromX = startX + i * (stageWidth + gap) + stageWidth; + const toX = startX + (i + 1) * (stageWidth + gap); + const y = 70; + const isActive = i === activeStage || i + 1 === activeStage; + + return ( + + + + + ); + })} + + {/* Stage boxes */} + {STAGES.map((stage, i) => { + const x = startX + i * (stageWidth + gap); + const y = 40; + const isActive = i === activeStage; + + return ( + + {/* Glow */} + {isActive && ( + + + + )} + {/* Box */} + + {/* Icon */} + + + + {/* Label */} + + {stage.label} + + + ); + })} + + {/* Fan-out indicator under chunk stage */} + + + fan-out to N chunks + + + + + {/* Rate limit indicator under embed stage */} + + + rate-limited API + + + + + {/* Retry indicator under index stage */} + + + retries on failure + + + + + + {/* Progress indicator */} +
+ {STAGES.map((stage, i) => ( +
+ ))} +
+
+ ); +}; + +export default RAGPipelineDiagram; diff --git a/frontend/docs/components/RAGPipelineDiagramWrapper.tsx b/frontend/docs/components/RAGPipelineDiagramWrapper.tsx new file mode 100644 index 0000000000..545f5fd0fc --- /dev/null +++ b/frontend/docs/components/RAGPipelineDiagramWrapper.tsx @@ -0,0 +1,7 @@ +import dynamic from "next/dynamic"; + +const RAGPipelineDiagram = dynamic(() => import("./RAGPipelineDiagram"), { + ssr: false, +}); + +export default RAGPipelineDiagram; diff --git a/frontend/docs/components/ScraperIntegrationTabs.mdx b/frontend/docs/components/ScraperIntegrationTabs.mdx new file mode 100644 index 0000000000..1ff12bcb57 --- /dev/null +++ b/frontend/docs/components/ScraperIntegrationTabs.mdx @@ -0,0 +1,108 @@ +import { Callout, Tabs } from "nextra/components"; +import { snippets } from "@/lib/generated/snippets"; +import { Snippet } from "@/components/code"; +import PackageManagerInstall from "@/components/PackageManagerInstall"; +import UniversalTabs from "@/components/UniversalTabs"; + + + + +[Firecrawl](https://docs.firecrawl.dev/) is a managed web scraping API that returns clean markdown from any URL. It handles JavaScript rendering, anti-bot bypasses, and sitemap crawling out of the box, so you can focus on what to do with the content instead of how to extract it. + + + + + + + + + + + + + Firecrawl Go: use `net/http` against the [Firecrawl REST API](https://docs.firecrawl.dev/api-reference). + + + + + Firecrawl Ruby: `bundle add firecrawl`. See [Firecrawl docs](https://docs.firecrawl.dev). + + + + + + +[Browserbase](https://docs.browserbase.com/) provides managed, headless Chrome browsers in the cloud. It pairs with Playwright or Puppeteer for full browser automation, handling stealth fingerprinting, proxies, and session management, so you can scrape JavaScript-heavy sites that block traditional HTTP requests. + + + + + + + + + + + + + Browserbase Go: connect via `chromedp` using the session CDP URL. 
See [Browserbase docs](https://docs.browserbase.com). + + + + + Browserbase Ruby: use Playwright via `playwright-ruby-client`. See [Browserbase docs](https://docs.browserbase.com). + + + + + + +[Playwright](https://playwright.dev/) is an open-source browser automation framework from Microsoft. It drives Chromium, Firefox, and WebKit with a single API, supporting navigation, clicks, form fills, and screenshots. Run it locally or in CI for scraping pages that require full browser rendering. + + + + + + + + + + + + + Playwright Go: use `go get github.com/playwright-community/playwright-go`. See [playwright-go](https://github.com/playwright-community/playwright-go). + + + + + Playwright Ruby: `bundle add playwright-ruby-client`. See [docs](https://playwright-ruby-client.vercel.app/). + + + + + + +OpenAI's [Web Search tool](https://platform.openai.com/docs/guides/tools-web-search) lets you augment a chat completion with live search results via the Responses API. The model decides when to search, synthesizes the results, and returns cited answers with no scraping infrastructure needed. + + + + + + + + + + + + + OpenAI Go: `go get github.com/sashabaranov/go-openai`, use the Responses API with `web_search` tool. + + + + + OpenAI Ruby: `bundle add openai`, use the Responses API with `web_search` tool. 
+ + + + + diff --git a/frontend/docs/components/Search.tsx b/frontend/docs/components/Search.tsx index b31259feec..aa7832b1da 100644 --- a/frontend/docs/components/Search.tsx +++ b/frontend/docs/components/Search.tsx @@ -17,13 +17,17 @@ import { } from "@/lib/search-config"; // --------------------------------------------------------------------------- -// Lazy singleton for the search index +// Lazy singleton for the search index (keyed by basePath so basePath changes don't reuse wrong index) // --------------------------------------------------------------------------- let indexPromise: Promise | null = null; - -function loadIndex(): Promise { - if (!indexPromise) { - indexPromise = fetch("/llms-search-index.json") +let indexPromiseBasePath: string | undefined = undefined; + +function loadIndex(basePath: string = ""): Promise { + const prefix = basePath ? basePath.replace(/\/$/, "") : ""; + const url = `${prefix}/llms-search-index.json`; + if (indexPromise === null || indexPromiseBasePath !== basePath) { + indexPromiseBasePath = basePath; + indexPromise = fetch(url) .then((res) => { if (!res.ok) throw new Error(`Failed to load search index: ${res.status}`); @@ -228,6 +232,8 @@ export default function Search({ className }: { className?: string }) { prevIsOpenRef.current = isOpen; }, [isOpen]); + const basePath = router.basePath ?? ""; + // Lazy-load the search index on first interaction (focus / open) rather // than on every page load. 
The search-query effect below already handles // the case where the index isn't ready yet, so this is purely a preload @@ -236,9 +242,9 @@ export default function Search({ className }: { className?: string }) { const preloadIndex = useCallback(() => { if (!preloadTriggered.current) { preloadTriggered.current = true; - loadIndex().then(() => setIndexReady(true)); + loadIndex(basePath).then(() => setIndexReady(true)); } - }, []); + }, [basePath]); // Run the search when the query changes useEffect(() => { @@ -264,7 +270,7 @@ export default function Search({ className }: { className?: string }) { if (!indexReady) { setLoading(true); - loadIndex() + loadIndex(basePath) .then((idx) => { setIndexReady(true); setLoading(false); @@ -274,10 +280,10 @@ export default function Search({ className }: { className?: string }) { return; } - loadIndex() + loadIndex(basePath) .then(runSearch) .catch(() => {}); - }, [query, indexReady]); + }, [query, indexReady, basePath]); // Global keyboard shortcut: / or Cmd/Ctrl+K useEffect(() => { diff --git a/frontend/docs/components/SidebarFolderNav.tsx b/frontend/docs/components/SidebarFolderNav.tsx new file mode 100644 index 0000000000..91ce38b687 --- /dev/null +++ b/frontend/docs/components/SidebarFolderNav.tsx @@ -0,0 +1,36 @@ +"use client"; + +import { useRouter } from "next/router"; +import { useEffect } from "react"; + +/** + * Nextra renders sidebar folders with index pages as buttons. When the folder + * label is clicked (expanded or collapsed), we navigate to its index (data-href) + * so the user lands on the overview. + * Works for any folder that has a route (e.g. has an index.mdx). 
+ */ +export function SidebarFolderNav() { + const router = useRouter(); + + useEffect(() => { + function handleClick(e: MouseEvent) { + const target = e.target as HTMLElement; + const button = target.closest?.( + ".nextra-sidebar-container button[data-href]", + ) as HTMLButtonElement | null; + if (!button) return; + + const href = button.getAttribute("data-href"); + if (!href) return; + + e.preventDefault(); + e.stopPropagation(); + router.push(href); + } + + document.addEventListener("click", handleClick, true); + return () => document.removeEventListener("click", handleClick, true); + }, [router]); + + return null; +} diff --git a/frontend/docs/components/UniversalTabs.tsx b/frontend/docs/components/UniversalTabs.tsx index f352def202..029f3c299c 100644 --- a/frontend/docs/components/UniversalTabs.tsx +++ b/frontend/docs/components/UniversalTabs.tsx @@ -1,18 +1,8 @@ import React from "react"; +import { useRouter } from "next/router"; import { Callout, Tabs } from "nextra/components"; import { useLanguage } from "../context/LanguageContext"; - -/* ── Logo map ──────────────────────────────────────────────── */ - -const LOGO_MAP: Record = { - Python: "/python-logo.svg", - "Python-Sync": "/python-logo.svg", - "Python-Async": "/python-logo.svg", - Typescript: "/typescript-logo.svg", - TypeScript: "/typescript-logo.svg", - Go: "/go-logo.svg", - Ruby: "/ruby-logo.svg", -}; +import { LOGO_PATHS } from "@/lib/docs-languages"; const tabLabelStyle: React.CSSProperties = { display: "inline-flex", @@ -46,12 +36,16 @@ function ThemedIcon({ src }: { src: string }) { } /** Returns a logo-enhanced label if a logo exists, otherwise the plain string. 
*/ -function toTabLabel(name: string): string | React.ReactElement { - const logo = LOGO_MAP[name]; - if (!logo) return name; +function toTabLabel( + name: string, + basePath: string, +): string | React.ReactElement { + const filename = LOGO_PATHS[name]; + if (!filename) return name; + const src = `${basePath}/${filename}`.replace(/\/+/g, "/"); return ( - + {name} ); @@ -84,13 +78,27 @@ interface UniversalTabsProps { items: string[]; children: React.ReactNode; optionKey?: string; + variant?: "tabs" | "hidden"; +} + +/** Normalize item for matching (items may use "Typescript" vs "TypeScript"). */ +function resolveSelectedItem(items: string[], value: string): string { + const exact = items.find((i) => i === value); + if (exact) return exact; + const lower = value.toLowerCase(); + const match = items.find((i) => i.toLowerCase() === lower); + return match ?? items[0]; } export const UniversalTabs: React.FC = ({ items, children, optionKey = "language", + variant = "tabs", }) => { + const router = useRouter(); + const basePath = router.basePath || ""; + const { selectedLanguage, setSelectedLanguage, @@ -101,6 +109,8 @@ export const UniversalTabs: React.FC = ({ const selectedValue = optionKey === "language" ? selectedLanguage : getSelectedOption(optionKey); + + const resolvedValue = resolveSelectedItem(items, selectedValue); + const handleChange = (index: number) => { if (optionKey === "language") { setSelectedLanguage(items[index]); @@ -109,11 +119,11 @@ export const UniversalTabs: React.FC = ({ } }; - const tabLabels = items.map(toTabLabel); + const tabLabels = items.map((item) => toTabLabel(item, basePath)); - // Inject early access callout into SDK tabs that are in early access + // Inject early access callout into SDK tabs that are in early access (skip for hidden variant) const processedChildren = - optionKey === "language" + optionKey === "language" && variant !== "hidden" ? 
React.Children.map(children, (child) => { if ( React.isValidElement<{ @@ -136,11 +146,28 @@ export const UniversalTabs: React.FC = ({ }) : children; + if (variant === "hidden") { + const childrenByItem = new Map(); + React.Children.forEach(processedChildren, (child) => { + if ( + React.isValidElement<{ title?: string; children?: React.ReactNode }>( + child, + ) && + child.props.title + ) { + const key = resolveSelectedItem(items, child.props.title); + childrenByItem.set(key, child.props.children); + } + }); + const selectedContent = childrenByItem.get(resolvedValue) ?? null; + return
{selectedContent}
; + } + return ( diff --git a/frontend/docs/components/WorkflowComparison.tsx b/frontend/docs/components/WorkflowComparison.tsx new file mode 100644 index 0000000000..cb42b87ac2 --- /dev/null +++ b/frontend/docs/components/WorkflowComparison.tsx @@ -0,0 +1,370 @@ +import React, { useState } from "react"; + +type Mode = "workflows" | "durable"; + +const WorkflowComparison: React.FC = () => { + const [active, setActive] = useState("workflows"); + + const data = { + workflows: { + label: "Workflows (DAGs)", + color: "indigo", + items: [ + { + icon: ( + + + + + + + + ), + title: "Structure", + desc: "DAG of tasks with declared dependencies", + }, + { + icon: ( + + + + + + ), + title: "State", + desc: "Cached between tasks automatically", + }, + { + icon: ( + + + + + ), + title: "Pausing", + desc: "Declarative conditions on task definitions", + }, + { + icon: ( + + + + ), + title: "Recovery", + desc: "Re-runs failed tasks; completed tasks are skipped", + }, + { + icon: ( + + + + + ), + title: "Slots", + desc: "Each task holds a slot while running", + }, + ], + }, + durable: { + label: "Durable Workflows", + color: "emerald", + items: [ + { + icon: ( + + + + + + + + ), + title: "Structure", + desc: "Long-running function with checkpoints", + }, + { + icon: ( + + + + + + ), + title: "State", + desc: "Stored in a durable event log", + }, + { + icon: ( + + + + + ), + title: "Pausing", + desc: "Inline SleepFor and WaitForEvent calls", + }, + { + icon: ( + + + + + + + ), + title: "Recovery", + desc: "Replays from last checkpoint automatically", + }, + { + icon: ( + + + + + + ), + title: "Slots", + desc: "Freed during waits — no wasted compute", + }, + ], + }, + }; + + const current = data[active]; + const isWorkflows = active === "workflows"; + + return ( +
+ {/* Toggle */} +
+ {(["workflows", "durable"] as Mode[]).map((mode) => ( + + ))} +
+ + {/* Cards */} +
+
+ {current.items.map((item, i) => ( +
+
+ {item.icon} +
+
+
+ {item.title} +
+
{item.desc}
+
+
+ ))} +
+ + {/* Best-for footer */} +
+ Best for: + {isWorkflows + ? "Predictable multi-step pipelines, ETL, CI/CD, and any workflow with a known shape" + : "Long waits, human-in-the-loop, large fan-outs, and complex procedural logic"} +
+
+
+ ); +}; + +export default WorkflowComparison; diff --git a/frontend/docs/components/WorkflowComparisonWrapper.tsx b/frontend/docs/components/WorkflowComparisonWrapper.tsx new file mode 100644 index 0000000000..657f46af31 --- /dev/null +++ b/frontend/docs/components/WorkflowComparisonWrapper.tsx @@ -0,0 +1,7 @@ +import dynamic from "next/dynamic"; + +const WorkflowComparison = dynamic(() => import("./WorkflowComparison"), { + ssr: false, +}); + +export default WorkflowComparison; diff --git a/frontend/docs/components/WorkflowDiagram.tsx b/frontend/docs/components/WorkflowDiagram.tsx new file mode 100644 index 0000000000..0e100e991d --- /dev/null +++ b/frontend/docs/components/WorkflowDiagram.tsx @@ -0,0 +1,289 @@ +import React from "react"; +import { brand, state, fill, inactive, gradient } from "./diagram-colors"; + +const WorkflowDiagram: React.FC = () => { + const nodeW = 130; + const nodeH = 46; + const rx = 10; + + return ( +
+
+ + + + + + + + + + + + + + + + + + + {/* Workflow label */} + + WORKFLOW + + + {/* Dashed container */} + + + {/* --- Row 1: Task A → Task B --- */} + {/* Task A */} + + + Task A + + + {/* Edge A → B */} + + + {/* Task B */} + + + Task B + + + {/* --- Fan out: B → C and B → D --- */} + {/* Edge B → C */} + + + {/* Edge B → D */} + + + {/* Task C */} + + + Task C + + + {/* Task D */} + + + Task D + + + {/* --- Both converge to Result --- */} + {/* Edges C → Result, D → Result drawn before Result box so they don't overlap text */} + + {/* Annotations */} + + start + + + depends on A + + + parallel + + + parallel + + + {/* Vertical dashed line between sequential and parallel sections */} + + + fan-out + + + {/* Labels for sections */} + + sequential + + +
+
+ ); +}; + +export default WorkflowDiagram; diff --git a/frontend/docs/components/code/CodeBlock.tsx b/frontend/docs/components/code/CodeBlock.tsx index d71382757f..6082440fd9 100644 --- a/frontend/docs/components/code/CodeBlock.tsx +++ b/frontend/docs/components/code/CodeBlock.tsx @@ -2,6 +2,7 @@ import React from "react"; import { parseDocComments } from "./codeParser"; import CodeStyleRender from "./CodeStyleRender"; import { Button } from "../ui/button"; +import { LanguageLogo } from "@/lib/language-logos"; import { CheckIcon, CopyIcon, @@ -33,18 +34,25 @@ export const CodeBlock = ({ source, target }: CodeRendererProps) => { source.raw.includes("// ...") || source.raw.includes("# ..."); return ( - <> +
-
+
{source.githubUrl && ( - - {source.codePath} - + <> + + + {source.codePath} + + )}
@@ -146,6 +154,6 @@ export const CodeBlock = ({ source, target }: CodeRendererProps) => {
- +
); }; diff --git a/frontend/docs/components/code/CodeTabs.tsx b/frontend/docs/components/code/CodeTabs.tsx index 8b59e9f83a..1cffbbcc19 100644 --- a/frontend/docs/components/code/CodeTabs.tsx +++ b/frontend/docs/components/code/CodeTabs.tsx @@ -1,7 +1,8 @@ import React from "react"; +import { DOC_LANGUAGES } from "@/lib/docs-languages"; import UniversalTabs from "../UniversalTabs"; -const languages = ["Python", "Typescript", "Go", "Ruby"]; +const languages = [...DOC_LANGUAGES]; type CodeSource = { path?: string; @@ -45,6 +46,3 @@ export const CodeTabs: React.FC = ({ children }) => { }; export default UniversalTabs; -function useMemo(arg0: () => any, arg1: any[]) { - throw new Error("Function not implemented."); -} diff --git a/frontend/docs/components/diagram-colors.ts b/frontend/docs/components/diagram-colors.ts new file mode 100644 index 0000000000..c471450bbe --- /dev/null +++ b/frontend/docs/components/diagram-colors.ts @@ -0,0 +1,75 @@ +/** + * Shared color constants for all documentation diagrams. 
+ * + * Brand palette: sourced from the marketing CSS and docs global.css + * State colors: match the dashboard badge conventions (badge.tsx / run-statuses.tsx) + */ + +// ── Brand palette ────────────────────────────────────────────────────────── +export const brand = { + navy: "#0A1029", + navyDark: "#02081D", + cyan: "#B8D9FF", + cyanDark: "#A5C5E9", + blue: "#3392FF", + blueLight: "#85BDFF", + magenta: "#BC46DD", + magentaLight: "#D585EF", + yellow: "#B8D41C", +} as const; + +// ── State colors (dashboard-consistent) ──────────────────────────────────── +export const state = { + success: "#22C55E", + successLight: "#4ADE80", + successLighter: "#86EFAC", + running: "#EAB308", + runningLight: "#FACC15", + runningDark: "#CA8A04", + failed: "#EF4444", + failedLight: "#FCA5A5", + queued: "#64748B", + cancelled: "#F97316", +} as const; + +// ── Fills (used for node backgrounds at reduced opacity) ─────────────────── +export const fill = { + activeNode: "rgba(10, 16, 41, 0.3)", + inactiveNode: "rgba(10, 16, 41, 0.15)", + success: "rgba(34, 197, 94, 0.3)", + successLight: "rgba(34, 197, 94, 0.1)", + running: "rgba(234, 179, 8, 0.25)", + runningLight: "rgba(234, 179, 8, 0.15)", + failed: "rgba(239, 68, 68, 0.25)", + magenta: "rgba(188, 70, 221, 0.2)", + magentaLight: "rgba(188, 70, 221, 0.15)", + blue: "rgba(51, 146, 255, 0.1)", + dimmed: "rgba(10, 16, 41, 0.15)", +} as const; + +// ── Container styling ────────────────────────────────────────────────────── +export const container = { + border: "rgba(51, 146, 255, 0.2)", + bg: "rgba(10, 16, 41, 0.04)", +} as const; + +// ── Inactive / dimmed elements ───────────────────────────────────────────── +export const inactive = { + stroke: "#1C2B4A", + text: "#64748B", + textLight: "#4A6080", + fill: "#0A1029", + edge: "#162035", + line: "#1C2B4A", + dot: "#162035", + progress: "rgba(10, 16, 41, 0.5)", +} as const; + +// ── Gradient stop pairs [start, end] for SVG linearGradient ──────────────── +export const gradient = { + 
blue: ["rgb(51, 146, 255)", "rgb(133, 189, 255)"] as const, + magenta: ["rgb(188, 70, 221)", "rgb(213, 133, 239)"] as const, + green: ["rgb(34, 197, 94)", "rgb(74, 222, 128)"] as const, + yellow: ["rgb(234, 179, 8)", "rgb(250, 204, 21)"] as const, + red: ["rgb(239, 68, 68)", "rgb(248, 113, 113)"] as const, +} as const; diff --git a/frontend/docs/components/ui/cookie-banner.tsx b/frontend/docs/components/ui/cookie-banner.tsx index 68adf9bde0..fafc9b42f7 100644 --- a/frontend/docs/components/ui/cookie-banner.tsx +++ b/frontend/docs/components/ui/cookie-banner.tsx @@ -119,7 +119,7 @@ export default function CookieConsent({
Learn more. diff --git a/frontend/docs/components/ui/dialog.tsx b/frontend/docs/components/ui/dialog.tsx new file mode 100644 index 0000000000..5fd652fdcb --- /dev/null +++ b/frontend/docs/components/ui/dialog.tsx @@ -0,0 +1,124 @@ +import * as DialogPrimitive from "@radix-ui/react-dialog"; +import { X } from "lucide-react"; +import * as React from "react"; + +import { cn } from "@/lib/utils"; + +const Dialog = DialogPrimitive.Root; + +const DialogTrigger = DialogPrimitive.Trigger; + +const DialogPortal = DialogPrimitive.Portal; + +const DialogClose = DialogPrimitive.Close; + +const DialogOverlay = React.forwardRef< + React.ElementRef, + React.ComponentPropsWithoutRef +>(({ className, ...props }, ref) => ( + +)); +DialogOverlay.displayName = DialogPrimitive.Overlay.displayName; + +const DialogContent = React.forwardRef< + React.ElementRef, + React.ComponentPropsWithoutRef & { + showCloseButton?: boolean; + } +>(({ className, children, showCloseButton = true, ...props }, ref) => ( + + + + {children} + {showCloseButton && ( + + + Close + + )} + + +)); +DialogContent.displayName = DialogPrimitive.Content.displayName; + +const DialogHeader = ({ + className, + ...props +}: React.HTMLAttributes) => ( +
+); +DialogHeader.displayName = "DialogHeader"; + +const DialogFooter = ({ + className, + ...props +}: React.HTMLAttributes) => ( +
+); +DialogFooter.displayName = "DialogFooter"; + +const DialogTitle = React.forwardRef< + React.ElementRef, + React.ComponentPropsWithoutRef +>(({ className, ...props }, ref) => ( + +)); +DialogTitle.displayName = DialogPrimitive.Title.displayName; + +const DialogDescription = React.forwardRef< + React.ElementRef, + React.ComponentPropsWithoutRef +>(({ className, ...props }, ref) => ( + +)); +DialogDescription.displayName = DialogPrimitive.Description.displayName; + +export { + Dialog, + DialogPortal, + DialogOverlay, + DialogClose, + DialogTrigger, + DialogContent, + DialogHeader, + DialogFooter, + DialogTitle, + DialogDescription, +}; diff --git a/frontend/docs/components/ui/select.tsx b/frontend/docs/components/ui/select.tsx new file mode 100644 index 0000000000..236cbcb722 --- /dev/null +++ b/frontend/docs/components/ui/select.tsx @@ -0,0 +1,127 @@ +import { cn } from "@/lib/utils"; +import { + CheckIcon, + ChevronDownIcon, + ChevronUpIcon, +} from "@radix-ui/react-icons"; +import * as SelectPrimitive from "@radix-ui/react-select"; +import * as React from "react"; + +const Select = SelectPrimitive.Root; + +const SelectValue = SelectPrimitive.Value; + +const SelectTrigger = React.forwardRef< + React.ElementRef, + React.ComponentPropsWithoutRef & { + size?: "default" | "sm"; + } +>(({ className, size = "default", children, ...props }, ref) => ( + span]:line-clamp-1", + size === "sm" && "h-8 px-2 py-1.5 text-xs", + size === "default" && "h-9 px-3 py-2", + className, + )} + {...props} + > + {children} + + + + +)); +SelectTrigger.displayName = SelectPrimitive.Trigger.displayName; + +const SelectScrollUpButton = React.forwardRef< + React.ElementRef, + React.ComponentPropsWithoutRef +>(({ className, ...props }, ref) => ( + + + +)); +SelectScrollUpButton.displayName = SelectPrimitive.ScrollUpButton.displayName; + +const SelectScrollDownButton = React.forwardRef< + React.ElementRef, + React.ComponentPropsWithoutRef +>(({ className, ...props }, ref) => ( + + + +)); 
+SelectScrollDownButton.displayName = + SelectPrimitive.ScrollDownButton.displayName; + +const SelectContent = React.forwardRef< + React.ElementRef, + React.ComponentPropsWithoutRef +>(({ className, children, position = "popper", ...props }, ref) => ( + + + + + {children} + + + + +)); +SelectContent.displayName = SelectPrimitive.Content.displayName; + +const SelectItem = React.forwardRef< + React.ElementRef, + React.ComponentPropsWithoutRef +>(({ className, children, ...props }, ref) => ( + + + + + + + {children} + +)); +SelectItem.displayName = SelectPrimitive.Item.displayName; + +export { Select, SelectValue, SelectTrigger, SelectContent, SelectItem }; diff --git a/frontend/docs/components/ui/tooltip.tsx b/frontend/docs/components/ui/tooltip.tsx new file mode 100644 index 0000000000..6256ce6ea2 --- /dev/null +++ b/frontend/docs/components/ui/tooltip.tsx @@ -0,0 +1,28 @@ +import * as React from "react"; +import * as TooltipPrimitive from "@radix-ui/react-tooltip"; + +import { cn } from "@/lib/utils"; + +const TooltipProvider = TooltipPrimitive.Provider; + +const Tooltip = TooltipPrimitive.Root; + +const TooltipTrigger = TooltipPrimitive.Trigger; + +const TooltipContent = React.forwardRef< + React.ElementRef, + React.ComponentPropsWithoutRef +>(({ className, sideOffset = 4, ...props }, ref) => ( + +)); +TooltipContent.displayName = TooltipPrimitive.Content.displayName; + +export { Tooltip, TooltipTrigger, TooltipContent, TooltipProvider }; diff --git a/frontend/docs/context/LanguageContext.tsx b/frontend/docs/context/LanguageContext.tsx index bea15641e1..9f6558152e 100644 --- a/frontend/docs/context/LanguageContext.tsx +++ b/frontend/docs/context/LanguageContext.tsx @@ -1,4 +1,5 @@ -import React, { createContext, useContext, useState, ReactNode, useEffect } from "react"; +import React, { createContext, useContext, useState, useRef, ReactNode, useEffect } from "react"; +import { DEFAULT_LANGUAGE } from "@/lib/docs-languages"; type OptionsState = { [key: string]: 
string; @@ -12,7 +13,7 @@ type LanguageContextType = { }; const LanguageContext = createContext({ - selectedLanguage: "Python", + selectedLanguage: DEFAULT_LANGUAGE, setSelectedLanguage: () => {}, getSelectedOption: () => "", setSelectedOption: () => {}, @@ -23,11 +24,15 @@ export const useLanguage = () => useContext(LanguageContext); export const LanguageProvider: React.FC<{ children: ReactNode }> = ({ children, }) => { - const [options, setOptions] = useState({ language: "Python" }); + const [options, setOptions] = useState({ + language: DEFAULT_LANGUAGE, + }); + const dirty = useRef(false); + + const selectedLanguage = options.language || DEFAULT_LANGUAGE; - // For backward compatibility - const selectedLanguage = options.language || "Python"; const setSelectedLanguage = (language: string) => { + dirty.current = true; setOptions((prev) => ({ ...prev, language })); }; @@ -36,41 +41,37 @@ export const LanguageProvider: React.FC<{ children: ReactNode }> = ({ }; const setSelectedOption = (key: string, value: string) => { + dirty.current = true; setOptions((prev) => ({ ...prev, [key]: value })); }; useEffect(() => { - if (typeof window !== "undefined") { - // Load all saved options from localStorage - const savedOptions = localStorage.getItem("uiOptions"); - if (savedOptions) { - try { - setOptions(JSON.parse(savedOptions)); - } catch (e) { - // Fallback for backward compatibility - const savedLanguage = localStorage.getItem("selectedLanguage"); - if (savedLanguage) { - setOptions({ language: savedLanguage }); - } - } - } else { - // Backward compatibility + if (typeof window === "undefined") return; + const savedOptions = localStorage.getItem("uiOptions"); + if (savedOptions) { + try { + setOptions(JSON.parse(savedOptions)); + } catch { const savedLanguage = localStorage.getItem("selectedLanguage"); if (savedLanguage) { setOptions({ language: savedLanguage }); } } + } else { + const savedLanguage = localStorage.getItem("selectedLanguage"); + if (savedLanguage) { + 
setOptions({ language: savedLanguage }); + } } }, []); useEffect(() => { - if (typeof window !== "undefined") { - // Save all options to localStorage - localStorage.setItem("uiOptions", JSON.stringify(options)); - - // Also save language separately for backward compatibility - localStorage.setItem("selectedLanguage", options.language || "Python"); - } + if (typeof window === "undefined" || !dirty.current) return; + localStorage.setItem("uiOptions", JSON.stringify(options)); + localStorage.setItem( + "selectedLanguage", + options.language || DEFAULT_LANGUAGE + ); }, [options]); return ( diff --git a/frontend/docs/guides/STEP_SCHEMA.md b/frontend/docs/guides/STEP_SCHEMA.md new file mode 100644 index 0000000000..070cafe9c2 --- /dev/null +++ b/frontend/docs/guides/STEP_SCHEMA.md @@ -0,0 +1,97 @@ +# Guide Step Schema + +Shared step names used across Python, TypeScript, Go, and Ruby examples. +Snippet keys follow: `snippets..guides...`. + +## human-in-the-loop +- `step_01_define_approval_task` - Durable task that proposes action +- `step_02_wait_for_event` - WaitForEvent for approval key +- `step_03_push_approval_event` - Push event from frontend/API +- `step_04_run_worker` - Worker registration and start + +## ai-agents +- `step_01_define_agent_task` - Durable task with reasoning loop +- `step_02_reasoning_loop` - LLM call, tool execution, loop +- `step_03_stream_response` - put_stream for token streaming +- `step_04_run_worker` - Worker with concurrency control + +## batch-processing +- `step_01_define_parent_task` - Parent workflow with batch input +- `step_02_fan_out_children` - Spawn child per item +- `step_03_process_item` - Child task processes single item +- `step_04_run_worker` - Worker with parent and child workflows + +## document-processing +- `step_01_define_dag` - DAG workflow: ingest -> parse -> extract -> validate +- `step_02_parse_stage` - Parse stage (mock OCR) +- `step_03_extract_stage` - Extract stage (mock LLM) +- `step_04_run_worker` - Worker with 
DAG workflows + +## event-driven +- `step_01_define_event_task` - Task triggered by event +- `step_02_register_event_trigger` - onEvents / event trigger +- `step_03_push_event` - Push event to trigger task +- `step_04_run_worker` - Worker registration + +## llm-pipelines +- `step_01_define_pipeline` - DAG with prompt -> generate -> validate +- `step_02_prompt_task` - Build prompt (mock LLM) +- `step_03_validate_task` - Validate and retry on failure +- `step_04_run_worker` - Worker with rate limit + +## rag-and-indexing +- `step_01_define_ingest_task` - Ingest documents +- `step_02_chunk_task` - Fan out, chunk per document +- `step_03_embed_task` - Embed chunks (mock) +- `step_04_run_worker` - Worker with rate limit + +## scheduled-jobs +- `step_01_define_cron_task` - Cron-triggered task +- `step_02_schedule_one_time` - One-time scheduled run +- `step_03_run_worker` - Worker with cron workflow + +## streaming +- `step_01_define_streaming_task` - Task that emits chunks +- `step_02_emit_chunks` - put_stream in worker +- `step_03_subscribe_client` - subscribe_to_stream on client +- `step_04_run_worker` - Worker registration + +## webhook-processing +- `step_01_define_webhook_task` - Task triggered by webhook +- `step_02_register_webhook` - Webhook trigger config +- `step_03_process_payload` - Process webhook payload +- `step_04_run_worker` - Worker registration + +## evaluator-optimizer +- `step_01_define_tasks` - Generator and evaluator child tasks +- `step_02_optimization_loop` - Durable task that loops generate → evaluate → feedback +- `step_03_run_worker` - Worker registration + +## routing +- `step_01_classify_task` - Classification task (LLM or rule-based) +- `step_02_specialist_tasks` - Specialist handler tasks (support, sales, default) +- `step_03_router_task` - Durable router task with if/else + RunChild +- `step_04_run_worker` - Worker registration + +## multi-agent +- `step_01_specialist_agents` - Specialist workflows (research, writing, code) +- 
`step_02_orchestrator_loop` - Durable orchestrator reasoning loop +- `step_03_run_worker` - Worker registration + +## web-scraping +- `step_01_scrape_task` - Scrape a single URL (with retries) +- `step_02_fan_out_scrape` - Fan out to scrape multiple URLs +- `step_03_cron_refresh` - Cron workflow to refresh scrapes on schedule +- `step_04_run_worker` - Worker registration + +## web-scraping +- `step_01_define_scrape_task` - Task that scrapes a URL (Firecrawl, Playwright, etc.) +- `step_02_process_content` - Extract/transform scraped content (optionally with LLM) +- `step_03_cron_workflow` - Cron workflow to refresh scrapes on a schedule +- `step_04_run_worker` - Worker registration + +## parallelization +- `step_01_parallel_tasks` - Tasks that run concurrently (content, safety, evaluate) +- `step_02_sectioning` - Sectioning pattern: different concerns in parallel +- `step_03_voting` - Voting pattern: same evaluation N times, aggregate +- `step_04_run_worker` - Worker registration diff --git a/frontend/docs/lib/docs-languages.ts b/frontend/docs/lib/docs-languages.ts new file mode 100644 index 0000000000..9f43895ec3 --- /dev/null +++ b/frontend/docs/lib/docs-languages.ts @@ -0,0 +1,45 @@ +/** + * Single source of truth for documentation languages and their metadata. + * Used by SidebarLanguageButton, LanguageSwitcher, UniversalTabs, LanguageContext, + * PackageManagerInstall, InstallCommand, and integration tab components. + */ + +export const DOC_LANGUAGES = ["Python", "Typescript", "Go", "Ruby"] as const; +export type DocLanguage = (typeof DOC_LANGUAGES)[number]; + +export const DEFAULT_LANGUAGE: DocLanguage = "Python"; + +/** Logo filename in public/ for each language. Includes aliases used in UniversalTabs. 
*/ +export const LOGO_PATHS: Record = { + Python: "python-logo.svg", + "Python-Sync": "python-logo.svg", + "Python-Async": "python-logo.svg", + Typescript: "typescript-logo.svg", + Go: "go-logo.svg", + Ruby: "ruby-logo.svg", +}; + +/** Package manager options for languages that support choice. Null = fixed tool. */ +export const PACKAGE_MANAGERS: Record< + DocLanguage, + readonly string[] | { fixed: string } +> = { + Python: ["pip", "poetry", "uv"], + Typescript: ["npm", "pnpm", "yarn"], + Go: ["go get"], + Ruby: ["bundle"], +}; + +export function getPackageManagers( + lang: DocLanguage +): readonly string[] | null { + const pm = PACKAGE_MANAGERS[lang]; + if (Array.isArray(pm)) return pm; + return null; +} + +export function getFixedPackageManagerMessage(lang: DocLanguage): string | null { + const pm = PACKAGE_MANAGERS[lang]; + if (pm && typeof pm === "object" && "fixed" in pm) return pm.fixed; + return null; +} diff --git a/frontend/docs/lib/language-logos.tsx b/frontend/docs/lib/language-logos.tsx new file mode 100644 index 0000000000..a77b4abc95 --- /dev/null +++ b/frontend/docs/lib/language-logos.tsx @@ -0,0 +1,57 @@ +import React from "react"; +import { useRouter } from "next/router"; + +/** Maps highlight language abbreviation to logo filename in public/ */ +const LOGO_BY_LANG: Record = { + py: "python-logo.svg", + ts: "typescript-logo.svg", + go: "go-logo.svg", + rb: "ruby-logo.svg", +}; + +/** Renders an SVG as a CSS mask filled with currentColor (works in light + dark mode). */ +function ThemedIcon({ src, size = 16 }: { src: string; size?: number }) { + return ( + + ); +} + +/** Renders the language logo if we have one for the given language abbrev (py, ts, go, rb). 
*/ +export function LanguageLogo({ + language, + className, + size = 16, +}: { + language: string; + className?: string; + size?: number; +}) { + const router = useRouter(); + const basePath = router.basePath || ""; + const filename = LOGO_BY_LANG[language?.toLowerCase()]; + if (!filename) return null; + const src = `${basePath}/${filename}`.replace(/\/+/g, "/"); + return ( + + + + ); +} diff --git a/frontend/docs/next.config.mjs b/frontend/docs/next.config.mjs index 2fa1b11e78..0120f05925 100644 --- a/frontend/docs/next.config.mjs +++ b/frontend/docs/next.config.mjs @@ -1,5 +1,9 @@ // Using ESM for Nextra v4 import nextra from 'nextra' +import path from 'path' +import { fileURLToPath } from 'url' + +const __dirname = path.dirname(fileURLToPath(import.meta.url)) // Configure Nextra for MDX and docs const withNextra = nextra({ @@ -13,6 +17,13 @@ const withNextra = nextra({ /** @type {import('next').NextConfig} */ const nextConfig = { + webpack(config) { + config.resolve.alias['@theguild/remark-mermaid/mermaid'] = path.resolve( + __dirname, + 'components/Mermaid.tsx', + ) + return config + }, transpilePackages: ["react-tweet"], swcMinify: false, images: { @@ -20,202 +31,152 @@ const nextConfig = { }, async redirects() { return [ - { - source: '/compute', - destination: '/home/compute', - permanent: true, - }, - { - source: '/compute/:path', - destination: '/home/compute', - permanent: true, - }, - { - source: '/:path((?!api|agent-instructions|home|cli|v1|v0|compute|sdk|contributing|self-hosting|launches|blog|llms|favicon\\.ico|.*\\.png|.*\\.gif|.*\\.svg|_next/.*|monitoring\-demo\.mp4).*)', - destination: '/home/:path*', - permanent: false, - }, - { - source: "/ingest/:path*", - destination: "https://app.posthog.com/:path*", - permanent: false, - }, - { - source: "/home/install-docs-mcp", - destination: "/home/coding-agents", - permanent: true, - }, - { - source: "/home/basics/overview", - destination: "/home/setup", - permanent: false, - }, - { - source: 
"/home/basics/(steps|workflows)", - destination: "/home/your-first-task", - permanent: false, - }, - { - source: "/home/basics/environments", - destination: "/home/environments", - permanent: false, - }, - { - source: "/home/features/concurrency/:path*", - destination: "/home/concurrency", - permanent: false, - }, - { - source: "/home/features/durable-execution", - destination: "/home/durable-execution", - permanent: false, - }, - { - source: "/home/features/retries/:path*", - destination: "/home/retry-policies", - permanent: false, - }, - { - source: "/home/features/errors-and-logging", - destination: "/home/logging", - permanent: false, - }, - { - source: "/home/features/on-failure-step", - destination: "/home/on-failure-tasks", - permanent: false, - }, - { - source: "/home/features/triggering-runs/event-trigger", - destination: "/home/run-on-event", - permanent: false, - }, - { - source: "/home/features/triggering-runs/cron-trigger", - destination: "/home/cron-runs", - permanent: false, - }, - { - source: "/home/features/triggering-runs/schedule-trigger", - destination: "/home/scheduled-runs", - permanent: false, - }, - { - source: "/home/features/rate-limits", - destination: "/home/rate-limits", - permanent: false, - }, - { - source: "/home/features/worker-assignment/overview", - destination: "/home/sticky-assignment", - permanent: false, - }, - { - source: "/home/features/worker-assignment/(overview|sticky-assignment)", - destination: "/home/sticky-assignment", - permanent: false, - }, - { - source: "/home/features/worker-assignment/worker-affinity", - destination: "/home/worker-affinity", - permanent: false, - }, - { - source: "/home/features/additional-metadata", - destination: "/home/additional-metadata", - permanent: false, - }, - { - source: "/home/features/advanced/manual-slot-release", - destination: "/home/manual-slot-release", - permanent: false, - }, - { - source: "/home/features/opentelemetry", - destination: "/home/opentelemetry", - permanent: 
false, - }, - { - source: "/home/features/cancellation", - destination: "/home/cancellation", - permanent: false, - }, - { - source: "/home/features/child-workflows", - destination: "/home/child-spawning", - permanent: false, - }, - { - source: "/sdks/python-sdk/:path*", - destination: "/sdks/python/client", - permanent: false, - }, - { - source: "/sdks/python", - destination: "/sdks/python/client", - permanent: false, - }, + // --- New site: section index redirects --- + { source: '/', destination: '/v1', permanent: false, basePath: false }, + { source: '/get-started', destination: '/v1', permanent: false, basePath: false }, + { source: '/reference', destination: '/reference/python/client', permanent: false, basePath: false }, + { source: '/reference/', destination: '/reference/python/client', permanent: false, basePath: false }, + { source: '/reference/python', destination: '/reference/python/client', permanent: false, basePath: false }, + { source: '/reference/python/', destination: '/reference/python/client', permanent: false, basePath: false }, + { source: '/reference/typescript', destination: '/reference/typescript/client', permanent: false, basePath: false }, + { source: '/reference/typescript/', destination: '/reference/typescript/client', permanent: false, basePath: false }, + { source: '/v1/migrating', destination: '/v1/migrating/v1-sdk-improvements', permanent: false, basePath: false }, + { source: '/v1/migrating/', destination: '/v1/migrating/v1-sdk-improvements', permanent: false, basePath: false }, + { source: '/agent-instructions', destination: '/agent-instructions/setup-cli', permanent: false, basePath: false }, + { source: '/agent-instructions/', destination: '/agent-instructions/setup-cli', permanent: false, basePath: false }, + { source: '/reference/typescript/feature-clients', destination: '/reference/typescript/feature-clients/crons', permanent: false, basePath: false }, + { source: '/reference/typescript/feature-clients/', destination: 
'/reference/typescript/feature-clients/crons', permanent: false, basePath: false }, + { source: '/reference/python/feature-clients', destination: '/reference/python/feature-clients/cron', permanent: false, basePath: false }, + { source: '/reference/python/feature-clients/', destination: '/reference/python/feature-clients/cron', permanent: false, basePath: false }, + // --- Old main: /home/* → /v1/* (only paths that existed on main) --- + { source: '/home/conditional-workflows', destination: '/v1/conditions', permanent: true, basePath: false }, + { source: '/home/on-failure-tasks', destination: '/v1/on-failure', permanent: true, basePath: false }, + { source: '/home/durable-execution', destination: '/v1/patterns/durable-task-execution', permanent: true, basePath: false }, + { source: '/home/:slug(dags|orchestration)', destination: '/v1/patterns/directed-acyclic-graphs', permanent: true, basePath: false }, + { source: '/home/durable-sleep', destination: '/v1/sleep', permanent: true, basePath: false }, + { source: '/home/durable-events', destination: '/v1/events', permanent: true, basePath: false }, + { source: '/home/durable-best-practices', destination: '/v1/patterns/mixing-patterns', permanent: true, basePath: false }, + { source: '/home/architecture', destination: '/v1/architecture-and-guarantees', permanent: true, basePath: false }, + { source: '/home/your-first-task', destination: '/v1/tasks', permanent: true, basePath: false }, + { source: '/home/running-tasks', destination: '/v1/tasks', permanent: true, basePath: false }, + { source: '/home/setup', destination: '/v1/setup/advanced', permanent: true, basePath: false }, + { source: '/home/hatchet-cloud-quickstart', destination: '/v1/quickstart', permanent: true, basePath: false }, + { source: '/home/coding-agents', destination: '/v1/setup/using-coding-agents', permanent: true, basePath: false }, + { source: '/home/install-docs-mcp', destination: '/v1/setup/using-coding-agents', permanent: true, basePath: false 
}, + { source: '/home/guarantees-and-tradeoffs', destination: '/v1/architecture-and-guarantees', permanent: true, basePath: false }, + { source: '/home/v1-sdk-improvements', destination: '/v1/migrating/v1-sdk-improvements', permanent: true, basePath: false }, + { source: '/home/migration-guide-engine', destination: '/v1/migrating/migration-guide-engine', permanent: true, basePath: false }, + { source: '/home/migration-guide-python', destination: '/v1/migrating/migration-guide-python', permanent: true, basePath: false }, + { source: '/home/migration-guide-typescript', destination: '/v1/migrating/migration-guide-typescript', permanent: true, basePath: false }, + { source: '/home/migration-guide-go', destination: '/v1/migrating/migration-guide-go', permanent: true, basePath: false }, + { source: '/home/:slug(asyncio|pydantic|lifespans|dependency-injection|dataclasses)', destination: '/reference/python/:slug', permanent: true, basePath: false }, + // Old main had redirects from /home/basics/* and /home/features/* → ensure those source URLs still resolve + { source: '/home/basics/overview', destination: '/v1/setup/advanced', permanent: true, basePath: false }, + { source: '/home/basics/(steps|workflows)', destination: '/v1/tasks', permanent: true, basePath: false }, + { source: '/home/basics/environments', destination: '/v1/setup/advanced/environments', permanent: true, basePath: false }, + { source: '/home/features/concurrency/:path*', destination: '/v1/concurrency', permanent: true, basePath: false }, + { source: '/home/features/durable-execution', destination: '/v1/patterns/durable-task-execution', permanent: true, basePath: false }, + { source: '/home/features/retries/:path*', destination: '/v1/retry-policies', permanent: true, basePath: false }, + { source: '/home/features/errors-and-logging', destination: '/v1/logging', permanent: true, basePath: false }, + { source: '/home/features/on-failure-step', destination: '/v1/on-failure', permanent: true, basePath: false 
}, + { source: '/home/features/triggering-runs/event-trigger', destination: '/v1/external-events/run-on-event', permanent: true, basePath: false }, + { source: '/home/features/triggering-runs/cron-trigger', destination: '/v1/cron-runs', permanent: true, basePath: false }, + { source: '/home/features/triggering-runs/schedule-trigger', destination: '/v1/scheduled-runs', permanent: true, basePath: false }, + { source: '/home/features/rate-limits', destination: '/v1/rate-limits', permanent: true, basePath: false }, + { source: '/home/features/worker-assignment/overview', destination: '/v1/advanced-assignment/sticky-assignment', permanent: true, basePath: false }, + { source: '/home/features/worker-assignment/(overview|sticky-assignment)', destination: '/v1/advanced-assignment/sticky-assignment', permanent: true, basePath: false }, + { source: '/home/features/worker-assignment/worker-affinity', destination: '/v1/advanced-assignment/worker-affinity', permanent: true, basePath: false }, + { source: '/home/features/additional-metadata', destination: '/v1/additional-metadata', permanent: true, basePath: false }, + { source: '/home/features/advanced/manual-slot-release', destination: '/v1/advanced-assignment/manual-slot-release', permanent: true, basePath: false }, + { source: '/home/features/opentelemetry', destination: '/v1/opentelemetry', permanent: true, basePath: false }, + { source: '/home/features/cancellation', destination: '/v1/cancellation', permanent: true, basePath: false }, + { source: '/home/features/child-workflows', destination: '/v1/child-spawning', permanent: true, basePath: false }, + { source: '/home/:path*', destination: '/v1/:path*', permanent: false, basePath: false }, + // Old main: /compute → /home/compute + { source: '/compute', destination: '/v1/compute', permanent: true, basePath: false }, + { source: '/compute/:path*', destination: '/v1/compute', permanent: true, basePath: false }, + // --- Old main: sdks, cli, guides --- + { source: 
'/sdks/python-sdk/:path*', destination: '/reference/python/client', permanent: false, basePath: false }, + { source: '/sdks/python', destination: '/reference/python/client', permanent: false, basePath: false }, + { source: '/sdks/:path*', destination: '/reference/:path*', permanent: false, basePath: false }, + { source: '/sdk/:path*', destination: '/reference/:path*', permanent: false, basePath: false }, + { source: '/cli/:path*', destination: '/reference/cli/:path*', permanent: false, basePath: false }, + { source: '/guides/:path*', destination: '/cookbooks/:path*', permanent: true, basePath: false }, + // --- Misc --- + { source: '/ingest/:path*', destination: 'https://app.posthog.com/:path*', permanent: false, basePath: false }, // Blog redirects to hatchet.run { source: "/blog/automated-documentation", destination: "https://hatchet.run/blog/automated-documentation", permanent: true, + basePath: false, }, { source: "/blog/background-tasks-fastapi-hatchet", destination: "https://hatchet.run/blog/fastapi-background-jobs-to-hatchet", permanent: true, + basePath: false, }, { source: "/blog/go-agents", destination: "https://hatchet.run/blog/go-agents", permanent: true, + basePath: false, }, { source: "/blog/warning-event-loop-blocked", destination: "https://hatchet.run/blog/warning-event-loop-blocked", permanent: true, + basePath: false, }, { source: "/blog/fastest-postgres-inserts", destination: "https://hatchet.run/blog/fastest-postgres-inserts", permanent: true, + basePath: false, }, { source: "/blog/task-queue-modern-python", destination: "https://hatchet.run/blog/task-queue-modern-python", permanent: true, + basePath: false, }, { source: "/blog/postgres-events-table", destination: "https://hatchet.run/blog/postgres-events-table", permanent: true, + basePath: false, }, { source: "/blog/migrating-off-prisma", destination: "https://hatchet.run/blog", permanent: true, + basePath: false, }, { source: "/blog/problems-with-celery", destination: 
"https://hatchet.run/blog/problems-with-celery", permanent: true, + basePath: false, }, { source: "/blog/multi-tenant-queues", destination: "https://hatchet.run/blog/multi-tenant-queues", permanent: true, + basePath: false, }, { source: "/blog/mergent-migration-guide", destination: "https://hatchet.run/blog", permanent: true, + basePath: false, }, { source: "/blog", destination: "https://hatchet.run/blog", permanent: true, - } + basePath: false, + }, ]; }, } diff --git a/frontend/docs/package.json b/frontend/docs/package.json index 7730699894..4be556aefe 100644 --- a/frontend/docs/package.json +++ b/frontend/docs/package.json @@ -27,8 +27,11 @@ }, "homepage": "https://github.com/shuding/nextra-docs-template#readme", "dependencies": { + "@radix-ui/react-dialog": "^1.1.15", "@radix-ui/react-icons": "^1.3.2", + "@radix-ui/react-select": "^2.1.6", "@radix-ui/react-slot": "^1.2.3", + "@radix-ui/react-tooltip": "^1.2.8", "@types/js-yaml": "^4.0.9", "@types/swagger-ui-react": "^5.18.0", "autoprefixer": "^10.4.21", diff --git a/frontend/docs/pages/404.tsx b/frontend/docs/pages/404.tsx new file mode 100644 index 0000000000..6f5a0c27d6 --- /dev/null +++ b/frontend/docs/pages/404.tsx @@ -0,0 +1,133 @@ +import Link from "next/link"; +import { useEffect, useState } from "react"; + +const Logo = ({ color }: { color: string }) => ( + + + +); + +function useDarkMode() { + const [dark, setDark] = useState(true); + + useEffect(() => { + const stored = localStorage.getItem("theme"); + if (stored === "light") { + setDark(false); + } else if (stored === "dark") { + setDark(true); + } else if (stored === "system" || !stored) { + setDark(window.matchMedia("(prefers-color-scheme: dark)").matches); + } + }, []); + + return dark; +} + +const light = { + bg: "#ffffff", + fg: "#0f172a", + muted: "#64748b", + cardBg: "rgba(0,0,0,0.02)", + border: "rgba(0,0,0,0.1)", + hoverBg: "rgba(0,0,0,0.05)", + logo: "hsl(228 61% 10%)", +}; + +const darkTheme = { + bg: "#02081d", + fg: "#e2e8f0", + muted: 
"#94a3b8", + cardBg: "rgba(255,255,255,0.04)", + border: "rgba(255,255,255,0.1)", + hoverBg: "rgba(255,255,255,0.08)", + logo: "hsl(212 100% 86%)", +}; + +export default function Custom404() { + const dark = useDarkMode(); + const t = dark ? darkTheme : light; + + return ( +
+ + + + +

+ 404 +

+ +

+ This page doesn't exist. It may have been moved or removed. +

+ + { + e.currentTarget.style.background = t.hoverBg; + e.currentTarget.style.borderColor = t.muted; + }} + onMouseLeave={(e) => { + e.currentTarget.style.background = t.cardBg; + e.currentTarget.style.borderColor = t.border; + }} + > + ← Back to Home + +
+ ); +} diff --git a/frontend/docs/pages/_app.tsx b/frontend/docs/pages/_app.tsx index ea7fa61214..5f9d53edb4 100644 --- a/frontend/docs/pages/_app.tsx +++ b/frontend/docs/pages/_app.tsx @@ -5,6 +5,7 @@ import { ConsentProvider } from "../context/ConsentContext"; import CookieConsent from "@/components/ui/cookie-banner"; import { PostHogProvider } from "@/providers/posthog"; import { CrossDomainLinkHandler } from "@/components/CrossDomainLinkHandler"; +import { SidebarFolderNav } from "@/components/SidebarFolderNav"; function MyApp({ Component, pageProps }: AppProps) { return ( @@ -14,6 +15,7 @@ function MyApp({ Component, pageProps }: AppProps) {
+
diff --git a/frontend/docs/pages/_meta.js b/frontend/docs/pages/_meta.js index 64e8e49777..a117f61be0 100644 --- a/frontend/docs/pages/_meta.js +++ b/frontend/docs/pages/_meta.js @@ -1,64 +1,66 @@ export default { - home: { + v1: { title: "Guide", type: "page", theme: { - toc: false, - }, - }, - _setup: { - display: "hidden", - }, - "self-hosting": { - title: "Self Hosting", - type: "page", - theme: { - toc: false, - }, - }, - contributing: { - title: "Contributing", - type: "page", - display: "hidden", - theme: { - toc: false, + toc: true, }, }, - cli: { - title: "CLI Reference", + cookbooks: { + title: "Cookbooks", type: "page", theme: { - toc: false, + toc: true, }, }, - "agent-instructions": { - title: "Agent Instructions", + "self-hosting": { + title: "Self-Hosting", type: "page", - display: "hidden", theme: { - toc: false, + toc: true, }, }, - sdks: { - title: "SDK Reference", + reference: { + title: "Reference", type: "menu", items: { + cli: { + title: "CLI Reference", + href: "/reference/cli", + type: "page", + }, python: { - title: "Python", - href: "/sdks/python/client", + title: "Python SDK", + href: "/reference/python/client", type: "page", }, typescript: { - title: "TypeScript", - href: "/sdks/typescript/client", + title: "Typescript SDK", + href: "/reference/typescript/client", type: "page", }, go: { - title: "Go", + title: "Go SDK", href: "https://pkg.go.dev/github.com/hatchet-dev/hatchet/sdks/go", type: "page", newWindow: true, }, }, }, + contributing: { + title: "Contributing", + type: "page", + display: "hidden", + theme: { + toc: true, + }, + }, + "agent-instructions": { + title: "Agent Instructions", + type: "page", + display: "hidden", + theme: { + toc: false, + }, + }, }; diff --git a/frontend/docs/pages/_setup/_clone/go.mdx b/frontend/docs/pages/_setup/_clone/go.mdx deleted file mode 100644 index fbc1b6444c..0000000000 --- a/frontend/docs/pages/_setup/_clone/go.mdx +++ /dev/null @@ -1,19 +0,0 @@ -import { Callout, Card, Cards, Steps, Tabs } 
from "nextra/components"; -import UniversalTabs from "@/components/UniversalTabs"; -import InstallCommand from "@/components/InstallCommand"; - -#### Clone a Quickstart Project - -```bash copy -git clone https://github.com/hatchet-dev/hatchet-go-quickstart.git -``` - -#### CD into the project - -```bash copy -cd hatchet-go-quickstart -``` - -#### Install dependencies - - diff --git a/frontend/docs/pages/_setup/_clone/py.mdx b/frontend/docs/pages/_setup/_clone/py.mdx deleted file mode 100644 index aa4e58ca25..0000000000 --- a/frontend/docs/pages/_setup/_clone/py.mdx +++ /dev/null @@ -1,19 +0,0 @@ -import { Callout, Card, Cards, Steps, Tabs } from "nextra/components"; -import UniversalTabs from "@/components/UniversalTabs"; -import InstallCommand from "@/components/InstallCommand"; - -#### Clone a Quickstart Project - -```bash copy -git clone https://github.com/hatchet-dev/hatchet-python-quickstart.git -``` - -#### CD into the project - -```bash copy -cd hatchet-python-quickstart -``` - -#### Install dependencies - - diff --git a/frontend/docs/pages/_setup/_clone/ts.mdx b/frontend/docs/pages/_setup/_clone/ts.mdx deleted file mode 100644 index 549cf15f38..0000000000 --- a/frontend/docs/pages/_setup/_clone/ts.mdx +++ /dev/null @@ -1,19 +0,0 @@ -import { Callout, Card, Cards, Steps, Tabs } from "nextra/components"; -import UniversalTabs from "@/components/UniversalTabs"; -import InstallCommand from "@/components/InstallCommand"; - -#### Clone a Quickstart Project - -```bash copy -git clone https://github.com/hatchet-dev/hatchet-typescript-quickstart.git -``` - -#### CD into the project - -```bash copy -cd hatchet-typescript-quickstart -``` - -#### Install dependencies - - diff --git a/frontend/docs/pages/cookbooks/_meta.js b/frontend/docs/pages/cookbooks/_meta.js new file mode 100644 index 0000000000..64a4d3b5e9 --- /dev/null +++ b/frontend/docs/pages/cookbooks/_meta.js @@ -0,0 +1,18 @@ +export default { + index: "Overview", + "--ai": { + title: "AI", + type: 
"separator", + }, + "ai-agents": { title: "AI Agents" }, + "human-in-the-loop": "Human-in-the-Loop", + "rag-and-indexing": "RAG & Data Indexing", + "llm-pipelines": "LLM Pipelines", + "--data-processing": { + title: "Data & Processing", + type: "separator", + }, + "batch-processing": "Batch Processing", + "document-processing": "Document Processing", + "web-scraping": "Web Scraping", +}; diff --git a/frontend/docs/pages/cookbooks/ai-agents/_meta.js b/frontend/docs/pages/cookbooks/ai-agents/_meta.js new file mode 100644 index 0000000000..6cc7de8060 --- /dev/null +++ b/frontend/docs/pages/cookbooks/ai-agents/_meta.js @@ -0,0 +1,7 @@ +export default { + index: "What is an AI Agent?", + "reasoning-loop": "Reasoning Loop", + routing: "Routing", + "multi-agent": "Multi-Agent", + parallelization: "Parallelization", +}; diff --git a/frontend/docs/pages/cookbooks/ai-agents/index.mdx b/frontend/docs/pages/cookbooks/ai-agents/index.mdx new file mode 100644 index 0000000000..75401984b4 --- /dev/null +++ b/frontend/docs/pages/cookbooks/ai-agents/index.mdx @@ -0,0 +1,85 @@ +import { Callout, Cards } from "nextra/components"; + +# What is an AI Agent? + +An **AI agent** is a program that uses an LLM to decide what to do next at runtime. Instead of following a fixed pipeline, the agent reasons about its goal, picks a tool or action, observes the result, and loops until the goal is met. This makes agents extremely powerful, but hard to run reliably. + +Agents generally fall into two categories: + +- **Semi-autonomous agents** rely on pre-written code that codifies business logic or procedures. The LLM decides _which_ tool or path to take, but every action it can invoke is defined ahead of time. +- **Fully autonomous agents** can write and execute arbitrary code. The LLM generates code at runtime, runs it, and acts on the output. These agents are highly flexible but require sandboxing and careful guardrails. 
+ +You can build both with Hatchet, but most teams choose semi-autonomous agents for the majority of production workloads because they are easier to reason about, test, and run more reliably. + +Agents fail in production when the process hosting them dies mid-loop, when they hold resources for hours or days while waiting on external input, or when a long-running reasoning chain exhausts a timeout. Hatchet solves these problems by making every agent a **[durable task](/v1/patterns/durable-task-execution)**. + +```mermaid +sequenceDiagram + participant U as User + participant A as Agent (Durable Task) + participant L as LLM + participant T as Tool (Child Task) + + U->>A: Send query + loop Reasoning loop + A->>L: Call LLM + L-->>A: Tool call / response + A->>T: RunChild (tool execution) + T-->>A: Result + end + A-->>U: Final answer +``` + +## How agents map to Hatchet + +| Agent concept | Hatchet primitive | +| ------------------- | ------------------------------------------------------------------------------------------------------------------- | +| Agent | [Durable task](/v1/patterns/durable-task-execution) | +| Reasoning loop | [Child spawning](/v1/durable-workflows/child-spawning): task re-spawns itself until done | +| Tool calls | [Child tasks](/v1/durable-workflows/child-spawning): sequential or [parallel](/v1/durable-workflows/child-spawning) | +| Human approval gate | [WaitForEvent](/v1/durable-workflows/events): slot freed while waiting | +| Routing by LLM | `if`/`else` in code + spawn child to different workflows | + +## Why Hatchet for agents + +**Simple primitives, flexible composition.** Hatchet gives you a small set of primitives for managing state and distributing workloads. You compose them however your agent needs, and they scale reliably without custom infrastructure. + +**Survives crashes.** Every step in an agent's orchestration path is checkpointed. If a worker dies, the agent resumes from the last checkpoint rather than restarting from scratch. 
+ +**Frees slots during waits.** When an agent waits for a human approval event, external event, or sleeps for a scheduled retry, the worker slot is [evicted](/v1/durable-workflows/task-eviction) and freed. No resources are held while the agent is idle, even if the wait lasts hours or days. + +**Handles streaming.** Pipe LLM tokens from inside the task to connected clients as they're generated. Hatchet manages the plumbing so you don't build your own pub/sub layer. See [Streaming](/v1/streaming). + +**Controls concurrency and rate limits.** Use [`CANCEL_IN_PROGRESS`](/v1/concurrency) on a session key so new user messages cancel stale agent runs. Use [`GROUP_ROUND_ROBIN`](/v1/concurrency) to distribute work fairly across users at scale. Add [rate limits](/v1/rate-limits) to stay within external API quotas. + +**Full observability.** Every child run appears in the Hatchet dashboard. You can trace the full reasoning chain: which tools were called, what the LLM returned, where the loop terminated. + +## Agent patterns + + + + The core agent pattern. Reason → act → observe → repeat until done. Includes + the evaluator-optimizer variant. + + + Classify incoming requests with an LLM or rule, then route to a specialist. + + + An orchestrator delegates to specialist workflows. Each specialist has its + own prompt and tools. + + + Fan out independent tool calls or sub-tasks in parallel. Aggregate results + before the agent continues. + + + Pause the agent for human approval. The slot is freed; the agent resumes + when the event arrives. + + + Pipe LLM tokens to frontends in real time. 
+ + diff --git a/frontend/docs/pages/cookbooks/ai-agents/multi-agent.mdx b/frontend/docs/pages/cookbooks/ai-agents/multi-agent.mdx new file mode 100644 index 0000000000..f383a10b40 --- /dev/null +++ b/frontend/docs/pages/cookbooks/ai-agents/multi-agent.mdx @@ -0,0 +1,179 @@ +import { Callout, Cards, Steps, Tabs } from "nextra/components"; +import { snippets } from "@/lib/generated/snippets"; +import { Snippet } from "@/components/code"; +import UniversalTabs from "@/components/UniversalTabs"; + +# Multi-Agent + +Multi-agent orchestration uses a coordinating agent that delegates work to specialist agents. Unlike [routing](/guides/ai-agents/routing) (which classifies once and calls one handler), the orchestrator runs a **reasoning loop**: it may call multiple specialists across multiple iterations, passing results between them, until the overall goal is met. + +Each specialist is a separate [durable task](/v1/patterns/durable-task-execution) with its own prompt, tools, timeout, and retry settings. The orchestrator's LLM decides which specialist to call next based on the accumulated context. 
+ +```mermaid +sequenceDiagram + participant O as Orchestrator + participant R as Research Agent + participant W as Writing Agent + participant C as Code Agent + + O->>O: Decide next step + O->>R: RunChild(research) + R-->>O: Research results + O->>O: Decide next step + O->>C: RunChild(code) + C-->>O: Code snippet + O->>O: Decide next step + O->>W: RunChild(writing) + W-->>O: Final draft + O-->>O: Done +``` + +## When to use + +| Scenario | Fit | +| --------------------------------------------------------------------- | -------------------------------------------------- | +| Complex tasks needing different expertise (research + code + writing) | Good: each specialist focuses on one domain | +| Customer service with support, sales, and billing specialists | Good: orchestrator picks the right expert per turn | +| Tasks where output from one specialist feeds the next | Good: orchestrator passes context between calls | +| Simple tasks a single agent can handle end-to-end | Skip: orchestration overhead isn't worth it | +| Specialists always run in a fixed sequence | Use [LLM Pipelines](/guides/llm-pipelines) instead | + +## How it maps to Hatchet + +The orchestrator is a **[durable task](/v1/patterns/durable-task-execution)** running a reasoning loop via [child spawning](/v1/durable-workflows/child-spawning). Each specialist is a standalone durable task spawned as a child run. The orchestrator's LLM returns structured tool calls, and each tool name maps to a specialist task. + +Because each specialist call is a child run, the orchestrator's slot is freed while specialists execute. If the orchestrator dies mid-loop, it resumes from the last checkpoint without re-running completed specialist calls. + +## Step-by-step walkthrough + + + +### Define the specialist tasks + +Each specialist is a standalone durable task with its own prompt and timeout. They run independently and can be reused across different orchestrators. 
+ + + + + + + + + + + + + + + + +### Orchestrator loop + +The orchestrator runs a [reasoning loop](/guides/ai-agents/reasoning-loop): call LLM, parse tool choice, [spawn specialist](/v1/durable-workflows/child-spawning), observe result, repeat. Context accumulates across iterations so later specialist calls have full history. + + + + + + + + + + + + + + + + +### Run the worker + +Register all specialists and the orchestrator, then start the worker. + + + + + + + + + + + + + + + + + + + + Always set a **max iteration count** and **execution timeout** on the + orchestrator. Without bounds, the loop can call specialists indefinitely. + + +## Multi-agent vs. routing + +| | Multi-agent | Routing | +| -------------------- | ------------------------------------------------- | ----------------------------- | +| **Specialist calls** | Multiple, across loop iterations | One per request | +| **Orchestration** | Reasoning loop, LLM decides next step dynamically | Classify once, route once | +| **Use when** | Task needs multiple types of expertise | Task fits a single specialist | + +## Related Patterns + + + + Multi-agent is a reasoning loop where "tools" are specialist durable tasks + instead of API calls. + + + Classify once and route to one handler. Multi-agent loops and may call many. + + + When multiple specialists can work independently, spawn them in parallel + within a single iteration. + + + The Hatchet primitive used to spawn specialist tasks from the orchestrator. 
+ + + +## Next Steps + +- [Durable Task Execution](/v1/patterns/durable-task-execution): understand checkpointing and replay for the orchestrator +- [Child Spawning](/v1/durable-workflows/child-spawning): spawn specialist tasks from the orchestrator loop +- [Timeouts](/v1/timeouts): set execution timeouts on the orchestrator and each specialist +- [Concurrency Control](/v1/concurrency): limit how many orchestrator runs execute in parallel diff --git a/frontend/docs/pages/cookbooks/ai-agents/parallelization.mdx b/frontend/docs/pages/cookbooks/ai-agents/parallelization.mdx new file mode 100644 index 0000000000..530a1920a3 --- /dev/null +++ b/frontend/docs/pages/cookbooks/ai-agents/parallelization.mdx @@ -0,0 +1,195 @@ +import { Callout, Cards, Steps, Tabs } from "nextra/components"; +import { snippets } from "@/lib/generated/snippets"; +import { Snippet } from "@/components/code"; +import UniversalTabs from "@/components/UniversalTabs"; + +# Parallelization + +Parallelization spawns multiple independent tasks at the same time and aggregates results before continuing. Inside an agent loop, this typically means running several tool calls concurrently when they don't depend on each other. At a system level, it can mean running the same input through multiple evaluators and picking the best (or majority) result. + +Hatchet distributes child runs across all running workers where the task is registered. The parent's slot is [freed while children execute](/v1/durable-workflows/task-eviction), so you don't hold resources during parallel work. + +There are two common variants: + +- **Sectioning**: different tasks handle different concerns in parallel (e.g., content generation + safety check). +- **Voting**: the same task runs N times and results are aggregated by majority vote or best score. 
+ +```mermaid +flowchart LR + subgraph Sectioning + A1[Agent] -->|spawn| S1[Content Generation] + A1 -->|spawn| S2[Safety Check] + S1 --> A2[Aggregate] + S2 --> A2 + end + + subgraph Voting + B1[Agent] -->|spawn| V1[Evaluator 1] + B1 -->|spawn| V2[Evaluator 2] + B1 -->|spawn| V3[Evaluator 3] + V1 --> B2[Majority Vote] + V2 --> B2 + V3 --> B2 + end +``` + +## When to use + +| Scenario | Fit | +| ----------------------------------------------------- | ----------------------------------------------------------------------------- | +| Agent calls 3 independent APIs (weather, news, stock) | Good: no dependencies between calls, latency drops to max of the three | +| Content generation + safety guardrail in parallel | Good: sectioning, both run at once, block if unsafe | +| Multiple evaluators vote on content quality | Good: voting, aggregate for more reliable decisions | +| Processing a batch of items (100+ documents) | Good: see [Batch Processing](/guides/batch-processing) for large-scale fanout | +| Steps depend on each other (output of A feeds B) | Skip: run sequentially | +| Provider rate limits are tight | Careful: parallel calls may hit limits; use [Rate Limits](/v1/rate-limits) | + +## How it maps to Hatchet + +The parent task spawns children via [child spawning](/v1/durable-workflows/child-spawning). Each child runs on any available worker. The parent's slot is [evicted](/v1/durable-workflows/task-eviction) while children execute, so you're not holding resources during the parallel work. When all children complete, the parent resumes and aggregates. + +## Step-by-step walkthrough + + + +### Define the parallel tasks + +Create separate tasks for each concern. These run independently and can be composed in different patterns. + + + + + + + + + + + + + + + + +### Sectioning (parallel concerns) + +Sectioning runs different concerns in parallel. The example generates content and checks safety at the same time. 
If the safety check fails, the content is blocked even though generation succeeded. + + + + + + + + + + + + + + + + +### Voting (parallel consensus) + +Voting runs the same evaluation N times and aggregates by majority or average score. This produces more reliable decisions than a single evaluation. + + + + + + + + + + + + + + + + +### Run the worker + +Register all tasks and start the worker. + + + + + + + + + + + + + + + + + + + + For large-scale parallelism (hundreds or thousands of items), see the [Batch + Processing](/guides/batch-processing) guide, which covers fan-out with + concurrency control. + + +## Related Patterns + + + + Large-scale fan-out with concurrency limits and progress tracking. + + + Parallelization applies within one iteration of an agent loop when multiple + tools are independent. + + + Combine voting (parallel evaluators) with optimization (feedback loop) for + higher-quality iteration. + + + The Hatchet concept: spawn children in parallel, parent waits for all. 
+ + + +## Next Steps + +- [Child Spawning](/v1/durable-workflows/child-spawning): spawn parallel children from a parent task +- [Task Eviction](/v1/durable-workflows/task-eviction): free the parent's slot while children execute +- [Rate Limits](/v1/rate-limits): throttle parallel calls to external APIs +- [Concurrency Control](/v1/concurrency): limit how many children run simultaneously diff --git a/frontend/docs/pages/cookbooks/ai-agents/reasoning-loop.mdx b/frontend/docs/pages/cookbooks/ai-agents/reasoning-loop.mdx new file mode 100644 index 0000000000..2c00dbd52e --- /dev/null +++ b/frontend/docs/pages/cookbooks/ai-agents/reasoning-loop.mdx @@ -0,0 +1,261 @@ +import { Callout, Cards, Steps, Tabs } from "nextra/components"; +import AgentLoopDiagram from "@/components/AgentLoopDiagramWrapper"; +import LLMIntegrationTabs from "@/components/LLMIntegrationTabs.mdx"; +import { snippets } from "@/lib/generated/snippets"; +import { Snippet } from "@/components/code"; +import UniversalTabs from "@/components/UniversalTabs"; + +# Reasoning Loop + +AI agents follow a **reason-act-observe** loop that can run for minutes or hours, repeating until the LLM determines the task is complete or a deterministic exit condition is met (max iterations, timeout, tool signal). + + + +In Hatchet, this is implemented as a [durable task](/v1/patterns/durable-task-execution) with a loop. At each iteration, the task [spawns a child](/v1/durable-workflows/child-spawning) to call the LLM, execute any tool calls, and determine whether additional iterations are required. Each completed iteration is checkpointed, so the agent survives crashes and worker slots are freed between iterations. 
+ +## When to use + +| Scenario | Fit | +| ----------------------------------------------- | ----------------------------------------------------------------- | +| Chatbot that picks tools based on user messages | Good: the loop runs until the agent has a final answer | +| Multi-step research that may take minutes | Good: durable execution survives long-running loops | +| Agent that needs human approval mid-loop | Good: combine with [Human-in-the-Loop](/guides/human-in-the-loop) | +| Fixed pipeline (prompt, generate, validate) | Skip: use [LLM Pipelines](/guides/llm-pipelines) instead | +| One-shot classification or extraction | Skip: a single [task](/v1/tasks) is simpler | + +## Step-by-step walkthrough + +You'll build a durable agent task that streams tokens and survives restarts. + + + +### Reasoning loop + +Define the core loop. Each iteration calls the LLM, executes any tool calls, and checks whether the task is complete. + + + + + + + + + + + + + + + + +The examples above use a mock LLM. To call a real provider, swap `get_llm_service()` with one of these. Tool execution is typically your own APIs; encapsulate them in a service module like the `get_tool_service()` helper shown above. + + + +### Wrap it in a durable task + +Create a [durable task](/v1/patterns/durable-task-execution) that invokes the reasoning loop from Step 1. Concurrency is set to `CANCEL_IN_PROGRESS` so new user messages cancel stale runs. + + + + + + + + + + + + + + + + +### Stream the response + +Emit LLM tokens from the task as they are generated. Clients subscribe to the stream and receive them in real-time. See [Streaming](/v1/streaming) for the full API. + + + + + + + + + + + + + + + + +### Run the worker + +Start the worker. The task definitions above use `CANCEL_IN_PROGRESS` concurrency so new user messages cancel stale runs. Pass `session_id` in input for per-session grouping. 
+ + + + + + + + + + + + + + + + + + + + Always set a **timeout** and **max iteration count** on agent loops. Without + bounds, an agent can loop indefinitely. See [Timeouts](/v1/timeouts) for + configuration. + + +## Variant: Evaluator-Optimizer + +The evaluator-optimizer is a specialized reasoning loop that uses two LLM calls per iteration: one to **generate** a candidate output and one to **evaluate** it against a rubric. If the score is below a threshold, the evaluator provides feedback and the generator tries again. This trades compute cost for output quality. + +| Use case | Generator | Evaluator | +| ------------------- | ------------------------ | ----------------------------------------------------- | +| **Content writing** | Draft post/email/copy | Score clarity, tone, length; provide edit suggestions | +| **Code generation** | Write function or query | Run tests or linter; feed back errors | +| **Data extraction** | Extract fields from text | Validate against schema; flag missing fields | +| **Translation** | Translate text | Back-translate and compare; score fidelity | + + + +### Define the generator and evaluator tasks + +Create separate tasks for generation and evaluation. The generator takes a topic and optional feedback; the evaluator scores a draft. + + + + + + + + + + + + + + + + +### Optimization loop + +The evaluator-optimizer task loops: generate, evaluate, check score. Each generator and evaluator call is a [spawned child task](/v1/durable-workflows/child-spawning) that is checkpointed on completion. + + + + + + + + + + + + + + + + + + +## Related Patterns + + + + The core loop pattern behind agent reasoning, where a task re-spawns itself + until a goal is met. + + + Pause agents for human feedback or scheduled retries without holding worker + slots. + + + Add approval gates to agent workflows. Pause for human review, then resume. + + + Agents that spawn parallel tool calls or sub-agent tasks. 
+ + + Route agent behavior based on LLM tool call decisions or user preferences. + + + +## Next Steps + +- [What is an Agent?](/guides/ai-agents): overview and pattern index +- [Durable Workflows](/v1/patterns/durable-task-execution): understand checkpointing and replay +- [Streaming](/v1/streaming): set up real-time LLM output streaming +- [Concurrency Control](/v1/concurrency): configure CANCEL_IN_PROGRESS for chat agents diff --git a/frontend/docs/pages/cookbooks/ai-agents/routing.mdx b/frontend/docs/pages/cookbooks/ai-agents/routing.mdx new file mode 100644 index 0000000000..4b0e181076 --- /dev/null +++ b/frontend/docs/pages/cookbooks/ai-agents/routing.mdx @@ -0,0 +1,172 @@ +import { Callout, Cards, Steps, Tabs } from "nextra/components"; +import { snippets } from "@/lib/generated/snippets"; +import { Snippet } from "@/components/code"; +import UniversalTabs from "@/components/UniversalTabs"; + +# Routing + +Routing classifies an incoming request and directs it to a specialist [durable task](/v1/patterns/durable-task-execution). A single entry point handles all requests; the routing logic (an LLM call, a rule-based check, or a keyword match) determines which downstream task runs. Only one branch executes per request. + +This pattern improves response quality because each specialist task has its own prompt, tools, and context optimized for that category. It also simplifies the caller: trigger one task and let the router decide where to send it. + +```mermaid +flowchart TD + R[Router Task] -->|classify| C{Category} + C -->|support| S[Support Task] + C -->|sales| B[Sales Task] + C -->|billing| D[Billing Task] + S --> O[Return result] + B --> O + D --> O +``` + +## When to use + +| Scenario | Fit | +| ------------------------------------------------------ | -------------------------------------------------------------------------- | +| Customer service (support vs. sales vs. 
billing) | Good: distinct domains with different prompts and tools | +| Document processing (invoice vs. receipt vs. contract) | Good: each type needs different extraction logic | +| Request triage (simple auto-reply vs. complex agent) | Good: avoid expensive agent loops for easy questions | +| All requests follow the same path | Skip: no benefit from routing | +| Routing rules are simple and known at definition time | Use [Parent Conditions](/v1/durable-workflows/conditions) in a DAG instead | + +## How it maps to Hatchet + +The router is a **[durable task](/v1/patterns/durable-task-execution)**. It spawns a classifier [child task](/v1/durable-workflows/child-spawning) (or does classification inline), then spawns the matching specialist as a [child run](/v1/durable-workflows/child-spawning). Since each specialist is a separate durable task, they can have their own [timeouts](/v1/timeouts), [retries](/v1/retry-policies), [rate limits](/v1/rate-limits), and [concurrency](/v1/concurrency) settings. + +Routing decisions are checkpointed. If the worker dies after classification but before the specialist finishes, the router resumes and does not re-classify. + +## Step-by-step walkthrough + + + +### Define the classifier task + +A separate task classifies the incoming message. This lets you observe the classification result and retry independently if the LLM fails. + + + + + + + + + + + + + + + + +### Define the specialist tasks + +Each specialist is a standalone durable task with its own prompt and tools. They run independently with their own timeout and retry settings. + + + + + + + + + + + + + + + + +### Route with a durable task + +The router classifies the message, then spawns the matching specialist. The classification result is checkpointed, so if the worker dies after classifying, it resumes and spawns the specialist without re-classifying. + + + + + + + + + + + + + + + + +### Run the worker + +Register all tasks and start the worker. 
+ + + + + + + + + + + + + + + + + + + + For simple routing based on input fields (not LLM classification), you can + skip the classify task and route directly in the durable task body based on + `input.type` or similar fields. + + +## Routing vs. DAG branching + +| | Routing (durable task) | DAG Parent Conditions | +| --------------------- | ---------------------------------------------- | ----------------------------------------------------- | +| **Branch decided by** | Runtime code (`if`/`else`, LLM call) | Declared conditions on parent task output | +| **Use when** | Category is unknown until an LLM classifies it | Branch criteria are known at workflow definition time | +| **Observability** | Router + specialist appear as separate runs | All branches visible in the DAG graph | + +## Related Patterns + + + + Like routing but with a loop: the orchestrator may call multiple specialists + across iterations. + + + DAG-level branching for when routing rules are fixed at definition time. + + + An agent loop can use routing internally to pick tools per iteration. + + + Fixed-sequence pipelines; route to different pipelines based on input type. 
+ + + +## Next Steps + +- [Durable Task Execution](/v1/patterns/durable-task-execution): understand how the router and specialists checkpoint +- [Child Spawning](/v1/durable-workflows/child-spawning): spawn specialist tasks from the router +- [Timeouts](/v1/timeouts): set execution timeouts on the router and each specialist +- [Retry Policies](/v1/retry-policies): configure retries for classification and specialist tasks diff --git a/frontend/docs/pages/cookbooks/batch-processing.mdx b/frontend/docs/pages/cookbooks/batch-processing.mdx new file mode 100644 index 0000000000..5c336512f2 --- /dev/null +++ b/frontend/docs/pages/cookbooks/batch-processing.mdx @@ -0,0 +1,158 @@ +import { Callout, Cards, Steps, Tabs } from "nextra/components"; +import BatchProcessingDiagram from "@/components/BatchProcessingDiagramWrapper"; +import { snippets } from "@/lib/generated/snippets"; +import { Snippet } from "@/components/code"; +import UniversalTabs from "@/components/UniversalTabs"; + +# Batch Processing + +Batch processing involves running the same operation across a large set of items like images, documents, records, or API calls. We'll structure batch workloads in Hatchet with fan-out, retry, and concurrency control. + + + +At its core, batch processing is [Fanout](/v1/durable-workflows/child-spawning) applied at scale. If your batch also has fixed stages (e.g., validate → transform → load), you can combine it with [Pre-Determined Pipelines](/v1/patterns/directed-acyclic-graphs). + +## Step-by-step walkthrough + +You'll build a parent workflow that fans out to one child task per item and aggregates results. + + + +### Define the parent workflow + +Create a parent workflow that receives a batch of item IDs and spawns one child per item. + + + + + + + + + + + + + + + + +### Process each item + +Each child task processes a single item independently. Failed items are retried according to your retry policy. 
+ + + + + + + + + + + + + + + + +### Run the worker + +Register and start the worker with both parent and child workflows. For large batches, use [durable workflows](/v1/patterns/durable-task-execution) so the parent does not hold a slot while waiting. + + + + + + + + + + + + + + + + + + + + For batches with thousands of items, use **durable workflows** so the parent + task doesn't hold a worker slot while waiting for all children to complete. + See [Durable Workflows](/v1/patterns/durable-task-execution) for details. + + +## Common Patterns + +| Pattern | Description | +| ------------------------- | ----------------------------------------------------------------------------------- | +| **Image processing** | Resize, transcode, or analyze images in parallel across workers | +| **Data enrichment** | Enrich records by calling external APIs (geocoding, company info, email validation) | +| **Report generation** | Generate per-customer reports in parallel, then aggregate into a summary | +| **Database migrations** | Process and migrate records in batches with retry and progress tracking | +| **Notification delivery** | Send emails, SMS, or push notifications to a user list with rate limiting | + +## Related Patterns + + + + The core pattern behind batch processing, spawning N children from a parent. + + + Chain batch processing with multi-stage transforms in a DAG. + + + A specialized batch processing use case for document indexing pipelines. + + + Process paginated results one page at a time with iterative child spawning. 
+ + + +## Next Steps + +- [Child Spawning](/v1/durable-workflows/child-spawning): learn the fan-out API for batch processing +- [Bulk Run](/v1/bulk-run): trigger large batches efficiently +- [Concurrency Control](/v1/concurrency): limit concurrent item processing +- [Rate Limits](/v1/rate-limits): protect external APIs during batch operations diff --git a/frontend/docs/pages/cookbooks/document-processing.mdx b/frontend/docs/pages/cookbooks/document-processing.mdx new file mode 100644 index 0000000000..9ec00fb9d4 --- /dev/null +++ b/frontend/docs/pages/cookbooks/document-processing.mdx @@ -0,0 +1,201 @@ +import { Callout, Cards, Steps, Tabs } from "nextra/components"; +import DocumentProcessingDiagram from "@/components/DocumentProcessingDiagramWrapper"; +import LLMIntegrationTabs from "@/components/LLMIntegrationTabs.mdx"; +import OCRIntegrationTabs from "@/components/OCRIntegrationTabs.mdx"; +import { snippets } from "@/lib/generated/snippets"; +import { Snippet } from "@/components/code"; +import UniversalTabs from "@/components/UniversalTabs"; + +# Document Processing + +PDF and file pipelines are central to AI workflows: parse documents, extract content with OCR or LLMs, validate and enrich the data, and classify or route it. We'll structure these as Hatchet [DAG workflows](/v1/patterns/directed-acyclic-graphs). + + + +Because the stages are fixed (ingest → parse → extract), document pipelines map naturally to [DAG workflows](/v1/patterns/directed-acyclic-graphs). You can add [child spawning](/v1/durable-workflows/child-spawning) within the ingest or parse stage to process multiple documents in parallel. + +## Step-by-step walkthrough + +You'll build a three-stage pipeline (ingest, parse, extract) using mocks so you can run it locally without API keys. + + + +### Define the DAG + +Create a workflow with a fixed pipeline: ingest, parse, extract. Each stage depends on the previous. 
+ + + + + + + + + + + + + + + + +### Parse stage + +The parse task depends on ingest (Step 1). It converts raw content to structured text (use OCR for images, mock for examples). + + + + + + + + + + + + + + + + +The examples above use a mock OCR service. To use a real provider, swap in one of these. Pick a provider, then your language: + + + +### Extract stage + +The extract task depends on parse (Step 2). Use an LLM or rules to extract entities. [Rate Limits](/v1/rate-limits) keep extract tasks within provider quotas. + + + + + + + + + + + + + + + + +The examples above use a mock extractor. To wire in a real LLM for extraction, swap `get_extract_service()` with a provider: + + + +### Run the worker + +Start the worker. For per-document parallelism, use [Child Spawning](/v1/durable-workflows/child-spawning) within the ingest stage. + + + + + + + + + + + + + + + + + + + + When fanning out to many documents, ensure your workers have enough slots or + use [Concurrency Control](/v1/concurrency) to limit how many run + simultaneously. + + +## Common Patterns + +| Pattern | Description | +| --------------------------- | ------------------------------------------------------------------------------------------------------ | +| **Invoice extraction** | Parse invoices, extract line items and totals with LLM, validate amounts, post to ERP | +| **Contract analysis** | Extract clauses and terms, classify risk, route for legal review | +| **Resume parsing** | Parse resumes, extract skills and experience, match to job requisitions | +| **Form processing** | Extract form fields from scans, validate against schemas, submit to backend systems | +| **Document classification** | Classify documents by type, route to appropriate [DAG workflows](/v1/patterns/directed-acyclic-graphs) | + +## Related Patterns + + + + The fixed-stage DAG pattern that document pipelines are built on. + + + When your goal is retrieval (chunk, embed, index) rather than extract and + transform. 
+ + + General-purpose batch patterns that apply to document workloads. + + + Parallelize document processing across your worker fleet. + + + +## Next Steps + +- [DAG Workflows](/v1/patterns/directed-acyclic-graphs): define multi-stage pipelines with task dependencies +- [Rate Limits](/v1/rate-limits): configure rate limiting for OCR and LLM APIs +- [Child Spawning](/v1/durable-workflows/child-spawning): fan out to per-document tasks +- [Webhooks](/v1/webhooks): trigger pipelines from file upload endpoints +- [Concurrency Control](/v1/concurrency): limit parallel document processing diff --git a/frontend/docs/pages/cookbooks/human-in-the-loop.mdx b/frontend/docs/pages/cookbooks/human-in-the-loop.mdx new file mode 100644 index 0000000000..0399e06daa --- /dev/null +++ b/frontend/docs/pages/cookbooks/human-in-the-loop.mdx @@ -0,0 +1,202 @@ +import { Callout, Cards, Steps, Tabs } from "nextra/components"; +import HumanInLoopDiagram from "@/components/HumanInLoopDiagramWrapper"; +import { snippets } from "@/lib/generated/snippets"; +import { Snippet } from "@/components/code"; +import UniversalTabs from "@/components/UniversalTabs"; + +# Human-in-the-Loop + +Human-in-the-loop [durable tasks](/v1/patterns/durable-task-execution) pause for human review or approval before continuing. An AI agent proposes an action, a human approves or rejects it, and the workflow resumes with the decision. When the human responds, the task picks up exactly where it left off. + + + +The task pauses on a [durable event](/v1/durable-workflows/events), freeing the worker slot until the approval arrives. If your workflow is a fixed DAG and the approval gate is known at definition time, [Event Conditions](/v1/durable-workflows/events) are a simpler alternative. It is your responsibility to emit the event from a different part of your application to restore the task. 
+ +## Step-by-step walkthrough + +You'll build a durable task that proposes an action, waits for a human to push an approval event, and resumes with the decision. + + + +### Write a wait-for-approval helper + +Define a helper that calls `WaitForEvent` to pause execution until a human responds. The [CEL expression](/v1/durable-workflows/events#event-filters) filters on the workflow run ID so the event only matches the specific task that is waiting. Hatchet frees the worker slot while the task is suspended and resumes it when the matching event arrives. + + + + + + + + + + + + + + + + +### Define the approval task + +Create a durable task that proposes an action, calls the helper from Step 1, and branches on the result. + + + + + + + + + + + + + + + + +### Push the approval event + +When the human clicks Approve or Reject in your UI, your frontend or API pushes the event to Hatchet. Include the `runId` in the payload so the CEL expression in Step 1 matches it to the correct waiting task. + + + + + + + + + + + + + + + + +### Run the worker + +Register and start the worker. Use [Branching](/v1/durable-workflows/conditions) to route on approve vs reject from the event payload. + + + + + + + + + + + + + + + + + + + + Always set an **execution timeout** on the durable task itself so it does not + wait indefinitely if a human never responds. See [Timeouts](/v1/timeouts) for + configuration. 
+ + +## Common Patterns + +| Pattern | Description | +| ------------------------------- | ----------------------------------------------------------------------- | +| **Content moderation** | Agent flags content; human approves or rejects before publish | +| **Financial approvals** | Agent proposes payment or transfer; human approves via dashboard | +| **Customer support escalation** | Agent drafts response; human reviews and sends, or edits before sending | +| **LLM output review** | Agent generates copy or code; human approves before it goes live | + +For DAG workflows with a fixed approval gate, use [Event Conditions](/v1/durable-workflows/events). For agent loops where the decision to wait is dynamic, use a durable task with `WaitForEvent`. + +## Related Patterns + + + + Pause workflows for external signals (events or sleep) without holding + slots. + + + Agent workflows that may need human approval at decision points. + + + Route workflow behavior based on approve vs reject from the event payload. + + + Declare approval gates in DAG workflows when the wait is known at definition + time. + + + +## Next Steps + +- [Durable Events](/v1/durable-workflows/events): `WaitForEvent` API and event filters +- [Pushing Events](/v1/external-events/pushing-events): push approval events from your frontend or API +- [Event Conditions](/v1/durable-workflows/events): approval gates in DAG workflows +- [Long Waits](/v1/durable-workflows/sleep): general pattern for durable pauses +- [Branching](/v1/durable-workflows/conditions): route on approve vs reject diff --git a/frontend/docs/pages/cookbooks/index.mdx b/frontend/docs/pages/cookbooks/index.mdx new file mode 100644 index 0000000000..d1ffebbe22 --- /dev/null +++ b/frontend/docs/pages/cookbooks/index.mdx @@ -0,0 +1,61 @@ +--- +asIndexPage: true +--- + +import { Callout, Cards } from "nextra/components"; + +# Cookbooks + +End-to-end examples for common use cases, with working code in Python, TypeScript, Go, and Ruby. 
Each cookbook builds a complete workflow you can run locally and adapt to your own project. + + + These guides assume you have the Hatchet SDK installed and a worker running. + If you haven't done that yet, start with [Get Started](/). + + +## AI + + + + Build agents that reason, act, and observe in a loop. Covers reasoning + loops, routing, multi-agent orchestration, and parallelization. + + + Pause a task for human review or approval, freeing the worker slot until the + decision arrives. The task resumes exactly where it left off. + + + Ingest documents, split into chunks, generate embeddings, and write to a + vector database using a DAG workflow. + + + Chain multiple model calls with validation, retries, and structured outputs. + Each step is a durable task with independent retry and rate-limit controls. + + + +## Data & Processing + + + + Run the same operation across a large set of items with fan-out, retry, and + concurrency control. + + + Parse PDFs and files, extract content with OCR or LLMs, validate, enrich, + and classify using a DAG pipeline. + + + Fetch, process, and store web content with retries, timeouts, rate limits, + and cron scheduling for recurring refreshes. + + + +## Troubleshooting + + + + Debug common issues with Hatchet workers, including connection failures, + tasks stuck in queued state, and phantom workers. 
+ + diff --git a/frontend/docs/pages/cookbooks/llm-pipelines.mdx b/frontend/docs/pages/cookbooks/llm-pipelines.mdx new file mode 100644 index 0000000000..e60664f0e6 --- /dev/null +++ b/frontend/docs/pages/cookbooks/llm-pipelines.mdx @@ -0,0 +1,173 @@ +import { Callout, Cards, Steps, Tabs } from "nextra/components"; +import LLMIntegrationTabs from "@/components/LLMIntegrationTabs.mdx"; +import { snippets } from "@/lib/generated/snippets"; +import { Snippet } from "@/components/code"; +import UniversalTabs from "@/components/UniversalTabs"; + +# LLM Pipelines + +LLM pipelines chain multiple model calls together with validation, retries, and structured outputs. Hatchet turns each step into a durable task so failures retry individually, rate limits protect provider APIs, and the full pipeline is observable in the dashboard. + +Because each LLM call maps to a task and validation steps gate what runs next, these pipelines are a natural fit for [DAG Workflows](/v1/patterns/directed-acyclic-graphs). + +## Step-by-step walkthrough + +You'll build a three-stage DAG pipeline (prompt, generate, validate) using a mock LLM so you can run it without API keys. + + + +### Define the pipeline + +Create a workflow with prompt construction, LLM generation, and validation stages. + + + + + + + + + + + + + + + + +### Prompt task + +The prompt task depends on the pipeline input (Step 1). Build the prompt from user input and context. This step may include retrieval from a vector database (see [RAG & Indexing](/guides/rag-and-indexing)). + + + + + + + + + + + + + + + + +The prompt is passed to your LLM service for generation. The examples above use a mock. To use a real provider, swap `get_llm_service()` with one of these: + + + +### Generate and validate + +This task takes the prompt from Step 2, calls the LLM, and validates the response. If validation fails, [Retry Policies](/v1/retry-policies) retry just this step with a corrective prompt. 
+ + + + + + + + + + + + + + + + +### Run the worker + +Start the worker. Configure [Rate Limits](/v1/rate-limits) to stay within LLM provider quotas. + + + + + + + + + + + + + + + + + + + + Always set **timeouts** on LLM call steps. Model providers can hang or respond + slowly under load. See [Timeouts](/v1/timeouts) for configuration. + + +## Common Patterns + +| Pattern | Description | +| ------------------------ | ------------------------------------------------------------------------- | +| **Generate → Validate** | Call LLM, validate structured output, retry with error context on failure | +| **Chain of thought** | Multi-step reasoning where each LLM call refines the previous output | +| **Parallel evaluation** | Fan out the same prompt to multiple models, then pick the best response | +| **Translation pipeline** | Generate content in one language, translate to others in parallel | +| **Summarize → Classify** | Summarize long text, then classify the summary for routing or tagging | + +## Related Patterns + + + + When LLM calls happen in a dynamic loop rather than a fixed pipeline. + + + Provide context to LLM pipeline steps via retrieval-augmented generation. + + + Extract structured data from documents using LLM-powered pipelines. + + + Run LLM pipelines across many inputs with fan-out and concurrency control. 
+ + + +## Next Steps + +- [DAG Workflows](/v1/patterns/directed-acyclic-graphs): define multi-stage LLM pipelines +- [Rate Limits](/v1/rate-limits): configure rate limiting for LLM providers +- [Retry Policies](/v1/retry-policies): handle transient LLM API errors +- [Streaming](/v1/streaming): stream LLM outputs to frontends in real-time diff --git a/frontend/docs/pages/cookbooks/rag-and-indexing.mdx b/frontend/docs/pages/cookbooks/rag-and-indexing.mdx new file mode 100644 index 0000000000..23141b05ae --- /dev/null +++ b/frontend/docs/pages/cookbooks/rag-and-indexing.mdx @@ -0,0 +1,246 @@ +import { Callout, Cards, Steps, Tabs } from "nextra/components"; +import RAGPipelineDiagram from "@/components/RAGPipelineDiagramWrapper"; +import EmbeddingIntegrationTabs from "@/components/EmbeddingIntegrationTabs.mdx"; +import { snippets } from "@/lib/generated/snippets"; +import { Snippet } from "@/components/code"; +import UniversalTabs from "@/components/UniversalTabs"; + +# RAG & Data Indexing + +RAG and indexing pipelines share a common shape: ingest documents, split them into chunks, generate embeddings, and write to a vector database. Because the stages are known upfront, these pipelines map naturally to a [DAG workflow](/v1/patterns/directed-acyclic-graphs), where each stage is a task and dependencies between stages are declared before execution begins. + + + +You declare the full graph (ingest → chunk → embed → index) and Hatchet executes tasks in order, running independent tasks in parallel automatically. You can add [fanout](/v1/durable-workflows/child-spawning) within the chunking stage to process documents in parallel. + +## Step-by-step walkthrough + +You'll define a workflow, then add tasks for ingesting, chunking, embedding, and querying, all using a mock embedding client so you can run it without API keys. + + + +### Define the workflow + +Define your input type and create an empty [DAG workflow](/v1/patterns/directed-acyclic-graphs). 
You'll add tasks to this workflow in the following steps. + + + + + + + + + + + + + + + + +### Define the ingest task + +Add a task that ingests documents. A trigger (event, cron, or API call) starts the pipeline with document references. + + + + + + + + + + + + + + + + +### Chunk the documents + +The ingest task (Step 2) fans out to one child per document. Each child splits its document into chunks. Use [child spawning](/v1/durable-workflows/child-spawning) for per-document parallelism. + + + + + + + + + + + + + + + + +### Embed and index + +Define a standalone `embed-chunk` task, then spawn one [child task](/v1/durable-workflows/child-spawning) per chunk from the DAG's `chunk-and-embed` task. Each child runs on any available worker and is individually retryable, so a single embedding failure does not restart the entire batch. [Rate Limits](/v1/rate-limits) throttle embedding API calls across all workers. + + + + + + + + + + + + + + + + +The examples above use a mock embedding client. To use a real provider, swap `get_embedding_service()` with one of these. Pick a provider, then your language: + + + +### Query + +Add a query task that reuses the same `embed-chunk` child task to embed the query, then performs a vector similarity search. In production, replace the empty results with a real vector DB lookup. + + + + + + + + + + + + + + + + +### Run the worker + +Start the worker and register the DAG workflow, the `embed-chunk` child task, and the `rag-query` task. + + + + + + + + + + + + + + + + + + + + When fanning out to many chunks, ensure your workers have enough slots or use + [Concurrency Control](/v1/concurrency) to limit how many run simultaneously. 
+ + +## Multi-Tenant Indexing + +For SaaS applications where multiple tenants share the same pipeline: + +- **GROUP_ROUND_ROBIN concurrency** distributes scheduling fairly so no single tenant monopolizes workers +- **Additional metadata** tags each run with a tenant ID for filtering in the dashboard +- **Priority queues** allow higher-priority indexing jobs to run ahead of lower-priority ones + +## Related Patterns + + + + Declare tasks and dependencies upfront so Hatchet can execute them in order. + + + Parallelize document and chunk processing across your worker fleet. + + + Implement incremental indexing that re-crawls until all changes are + processed. + + + General-purpose batch processing patterns that apply to indexing workloads. + + + Extract and transform documents (invoices, contracts, forms), distinct from + RAG's chunk-and-embed for retrieval. + + + +## Next Steps + +- [DAG Workflows](/v1/patterns/directed-acyclic-graphs): define multi-stage pipelines +- [Rate Limits](/v1/rate-limits): configure rate limiting for embedding APIs +- [Child Spawning](/v1/durable-workflows/child-spawning): fan out to per-document tasks +- [Concurrency Control](/v1/concurrency): fair scheduling for multi-tenant indexing diff --git a/frontend/docs/pages/cookbooks/troubleshooting-workers.mdx b/frontend/docs/pages/cookbooks/troubleshooting-workers.mdx new file mode 100644 index 0000000000..55f0a9292f --- /dev/null +++ b/frontend/docs/pages/cookbooks/troubleshooting-workers.mdx @@ -0,0 +1,59 @@ +import { Tabs, Callout } from "nextra/components"; + +# Troubleshooting Hatchet Workers + +This guide covers common issues when deploying and operating Hatchet workers. + +## Quick debugging checklist + +Before diving into specific issues, run through these checks: + +1. **Verify your API token** — make sure `HATCHET_CLIENT_TOKEN` matches the token generated in the Hatchet dashboard for your tenant. +2. 
**Check worker logs**: look for connection errors, heartbeat failures, or crash traces in your worker output.
+3. **Check the dashboard**: navigate to the Workers tab to see if your worker is registered and healthy.
+4. **Confirm network connectivity**: workers need to reach the Hatchet engine over gRPC. Firewalls, VPNs, or missing TLS configuration can block this.
+5. **Check SDK version**: ensure your SDK version is compatible with your engine version. Mismatches can cause subtle failures.
+
+## Could not send task to worker
+
+If you see this error in the event history of a task, it could mean several things:
+
+1. The worker is closing its network connection while the task is being sent. This could be caused by the worker crashing or going offline.
+
+2. The payload is too large for the worker to accept or the Hatchet engine to send. The default maximum payload size is 4MB. Consider reducing the size of the input data or output data of your tasks.
+
+3. The worker has a large backlog of tasks in-flight on the network connection and is rejecting new tasks. This can occur if workers are geographically distant from the Hatchet engine or if there are network issues causing delays. Hatchet Cloud runs by default in `us-west-2` (Oregon, USA), so consider deploying your workers in a region close to that for the best performance.
+
+   If you are self-hosting, you can increase the maximum backlog size via the `SERVER_GRPC_WORKER_STREAM_MAX_BACKLOG_SIZE` environment variable in your Hatchet engine configuration. The default is 20.
+
+## No workers visible in dashboard
+
+If you have deployed workers but they are not visible in the Hatchet dashboard, it is likely that:
+
+1. Your API token is invalid or incorrect. Ensure that the token you are using to start the worker matches the token generated in the Hatchet dashboard for your tenant.
+
+2. Worker heartbeats are not reaching the Hatchet engine. You will see noisy logs in the worker output if this is the case.
+
+## Tasks stuck in QUEUED state
+
+If tasks remain in the `QUEUED` state and never move to `RUNNING`:
+
+1. **No workers registered for the task**: check the Workers tab in the dashboard and confirm a worker is registered that handles the task name. If you recently renamed a task, make sure the worker has been restarted with the updated code.
+
+2. **All worker slots are full**: if every slot is occupied by other tasks, new tasks will wait in the queue. Check worker utilization in the dashboard or increase the [slot count](/v1/workers#slots).
+
+3. **Concurrency or rate limit is blocking**: if you've configured [concurrency limits](/v1/concurrency) or [rate limits](/v1/rate-limits), tasks may be held back intentionally. Review your configuration.
+
+## Worker keeps disconnecting
+
+If your worker repeatedly connects and then drops:
+
+1. **Resource exhaustion**: the worker process may be running out of memory or CPU and getting killed by the OS or orchestrator (OOM kill). Check system logs and increase resource limits.
+
+2. **Network instability**: intermittent connectivity between the worker and the Hatchet engine will cause reconnection cycles. Check for packet loss or high latency between the worker and the engine.
+
+3. **Graceful shutdown not configured**: if your deployment platform sends `SIGTERM` and the worker doesn't handle it, in-flight tasks may be interrupted. Ensure your worker handles shutdown signals and gives tasks time to complete.
+
+## Phantom workers active in dashboard
+
+This is often due to workers still running in your deployed environment. We see this most often with very long termination periods for workers, or in local development environments where worker processes are leaking. If you are in a local development environment, you can usually view running Hatchet worker processes via `ps -a | grep worker` (or whatever your entrypoint binary is called) and kill them manually.
diff --git a/frontend/docs/pages/cookbooks/web-scraping.mdx b/frontend/docs/pages/cookbooks/web-scraping.mdx new file mode 100644 index 0000000000..9e0f641ef2 --- /dev/null +++ b/frontend/docs/pages/cookbooks/web-scraping.mdx @@ -0,0 +1,220 @@ +import { Callout, Cards, Steps, Tabs } from "nextra/components"; +import { snippets } from "@/lib/generated/snippets"; +import { Snippet } from "@/components/code"; +import UniversalTabs from "@/components/UniversalTabs"; +import ScraperIntegrationTabs from "@/components/ScraperIntegrationTabs.mdx"; + +# Web Scraping + +Web scraping workflows fetch content from external websites, process it, and store the results. Scraping is inherently unreliable: pages change layout, rate limits kick in, requests time out. Scrape tasks need retries, timeouts, and concurrency control. Hatchet provides all three, plus cron scheduling to refresh scraped data on a recurring cadence. + +A typical web scraping pipeline has three parts: + +1. **Scrape**: fetch the page content (HTML, rendered JS, or structured API response) +2. **Process**: extract, transform, or summarize the content (optionally with an LLM) +3. **Schedule**: run the pipeline periodically via a cron workflow + +```mermaid +flowchart LR + C[Cron Trigger] --> S[Scrape Task] + S --> P[Process Task] + P --> D[(Store Results)] +``` + +## Step-by-step walkthrough + +You'll build a scrape task with retries, a processing step, a cron workflow that refreshes your scraped data every 6 hours, and a rate-limited variant to avoid getting blocked. + + + +### Define the scrape task + +Create a task that fetches a URL and returns the content. Set a timeout (pages can hang) and retries (transient failures are common). The examples below use a mock. Swap it for Firecrawl, Playwright, or any HTTP client. + + + + + + + + + + + + + + + + +The examples above use a mock scraper. To use a real scraping provider, swap the mock with one of these. 
Pick a provider, then your language: + + + +### Process the scraped content + +A separate task extracts or transforms the raw scraped content. This could be simple parsing, or an LLM call to summarize or extract structured data. Keeping it separate lets you retry processing independently from scraping. + + + + + + + + + + + + + + + + +### Schedule recurring scrapes + +Wrap the pipeline in a cron workflow to refresh data on a schedule. The example below runs every 6 hours and scrapes a list of URLs. Each scrape + process pair runs as child tasks, so failures on one URL don't block the others. + + + + + + + + + + + + + + + + +### Add rate limiting + +Target sites will block you if you send too many requests. Create a separate rate-limited scrape task that caps requests to a fixed number per minute across all workers. Hatchet holds back task executions that would exceed the limit, so you stay within budget without adding sleep logic in your code. See [Rate Limits](/v1/rate-limits) for details. + + + + + + + + + + + + + + + + +### Run the worker + +Register all tasks (including the rate-limited variant) and upsert the rate limit before starting the worker. The cron schedule activates when the worker connects. + + + + + + + + + + + + + + + + + + + + Always set **timeouts** and **retries** on scrape tasks. Pages can hang + indefinitely, and transient network failures are common. See + [Timeouts](/v1/timeouts) and [Retry Policies](/v1/retry-policies). 
+ + +## Common Patterns + +| Pattern | Description | +| ----------------------- | ---------------------------------------------------------------------------------------------- | +| **Price monitoring** | Scrape competitor pricing pages on a schedule; alert on changes | +| **Content aggregation** | Scrape multiple news sources; use LLM to deduplicate and summarize | +| **SEO monitoring** | Scrape your own pages to verify meta tags, headings, and content | +| **Lead enrichment** | Scrape company websites to enrich CRM records with latest info | +| **Documentation sync** | Scrape external docs; chunk and embed for RAG (see [RAG & Indexing](/guides/rag-and-indexing)) | +| **Compliance checking** | Scrape regulatory pages; alert when content changes | + +## Related Patterns + + + + Cron expressions and one-time scheduled runs for periodic scraping. + + + Fan out scrapes across many URLs in parallel with concurrency control. + + + Chunk and embed scraped content for retrieval-augmented generation. + + + Extract structured data from scraped documents with OCR and LLM pipelines. 
+ + + +## Next Steps + +- [Cron Triggers](/v1/cron-runs): cron expression syntax and configuration +- [Retry Policies](/v1/retry-policies): handle transient scraping failures +- [Rate Limits](/v1/rate-limits): throttle requests to avoid being blocked +- [Concurrency Control](/v1/concurrency): limit parallel scrapes per domain diff --git a/frontend/docs/pages/home/_meta.js b/frontend/docs/pages/home/_meta.js deleted file mode 100644 index 8e4771d82a..0000000000 --- a/frontend/docs/pages/home/_meta.js +++ /dev/null @@ -1,143 +0,0 @@ -export default { - "--intro": { - title: "Why Hatchet?", - type: "separator", - }, - index: "🪓 Welcome", - architecture: "Architecture", - "guarantees-and-tradeoffs": "Guarantees & Tradeoffs", - "--quickstart": { - title: "Setup", - type: "separator", - }, - "hatchet-cloud-quickstart": "Hatchet Cloud Quickstart", - setup: "Advanced Setup", - "coding-agents": "Using Coding Agents", - "--guide": { - title: "Fundamentals", - type: "separator", - }, - "your-first-task": "Tasks", - workers: "Workers", - "running-your-task": "Running Tasks", - environments: "Environments", - "--running-tasks": { - title: "Ways of Running Tasks", - type: "separator", - }, - "running-tasks": "Running Tasks", - "run-with-results": "Run and Wait Trigger", - "run-no-wait": "Run Without Wait Trigger", - "scheduled-runs": "Scheduled Trigger", - "cron-runs": "Cron Trigger", - "run-on-event": "Event Trigger", - "bulk-run": "Bulk Run Many", - webhooks: "Webhooks", - "inter-service-triggering": "Inter-Service Triggering", - "--deploying-workers": { - title: "Deploying Workers", - type: "separator", - }, - docker: "Running with Docker", - "troubleshooting-workers": "Troubleshooting", - compute: { - title: "Managed Compute", - type: "page", - display: "hidden", - }, - "worker-healthchecks": "Worker Health Checks", - "autoscaling-workers": "Autoscaling Workers", - "--flow-control": { - title: "Flow Control", - type: "separator", - }, - concurrency: "Concurrency", - 
"rate-limits": "Rate Limits", - priority: "Priority", - "--advanced-workflows": { - title: "Workflows", - type: "separator", - }, - orchestration: "Task Orchestration", - dags: { - title: "Directed Acyclic Graphs (DAGs)", - }, - "conditional-workflows": "Conditional Workflows", - "on-failure-tasks": "On Failure Tasks", - "child-spawning": { - title: "Child Spawning", - }, - "additional-metadata": { - title: "Additional Metadata", - }, - "--durable-execution": { - title: "Durable Execution", - type: "separator", - }, - "durable-execution": { - title: "Durable Execution", - }, - "durable-events": { - title: "Durable Events", - }, - "durable-sleep": { - title: "Durable Sleep", - }, - "durable-best-practices": { - title: "Best Practices", - }, - "--error-handling": { - title: "Error Handling", - type: "separator", - }, - timeouts: "Timeouts", - "retry-policies": "Retry Policies", - "bulk-retries-and-cancellations": "Bulk Retries and Cancellations", - - "--assignment": { - title: "Advanced Assignment", - type: "separator", - }, - "sticky-assignment": "Sticky Assignment", - "worker-affinity": "Worker Affinity", - "manual-slot-release": "Manual Slot Release", - "--observability": { - title: "Observability", - type: "separator", - }, - logging: "Logging", - opentelemetry: "OpenTelemetry", - "prometheus-metrics": "Prometheus Metrics", - "--advanced-tasks": { - title: "Advanced Task Features", - type: "separator", - }, - cancellation: { - title: "Cancellation", - }, - streaming: { - title: "Streaming", - }, - middleware: { - title: "Middleware & Dependency Injection", - }, - "--v1-migration-guides": { - title: "V1 Migration Guides", - type: "separator", - }, - "v1-sdk-improvements": { - title: "SDK Improvements", - }, - "migration-guide-engine": "Engine Migration Guide", - "migration-guide-python": "Python Migration Guide", - "migration-guide-typescript": "Typescript Migration Guide", - "migration-guide-go": "Go Migration Guide", - "--python": { - title: "Python Specifics", 
- type: "separator", - }, - asyncio: "Asyncio", - pydantic: "Pydantic", - lifespans: "Lifespans", - dataclasses: "Dataclass Support", -}; diff --git a/frontend/docs/pages/home/architecture.mdx b/frontend/docs/pages/home/architecture.mdx deleted file mode 100644 index 4014b9024a..0000000000 --- a/frontend/docs/pages/home/architecture.mdx +++ /dev/null @@ -1,99 +0,0 @@ -# Architecture - -## Overview - -Hatchet's architecture is designed around simplicity and reliability. At its core, Hatchet consists of three main components: the **Engine**, the **API Server**, and **Workers**. State is managed durably and efficiently, eliminating the need for additional message brokers or distributed systems. - -Whether you use [Hatchet Cloud](https://cloud.onhatchet.run) or self-host, the architecture remains consistent, allowing seamless migration between deployment models as your needs evolve. - -```mermaid -graph LR - subgraph "External (Optional)" - EXT[Webhooks
Events] - end - - subgraph "Your Infrastructure" - APP[Your API, App, Service, etc.] - W[Workers] - end - - subgraph "Hatchet" - API[API Server] - ENG[Engine] - DB[(Database)] - end - - EXT --> API - APP <--> API - API --> ENG - ENG <--> DB - API <--> DB - ENG <-.->|gRPC| W - - classDef userInfra fill:#e3f2fd,stroke:#1976d2,stroke-width:2px,color:#0d47a1 - classDef hatchet fill:#f1f8e9,stroke:#388e3c,stroke-width:2px,color:#1b5e20 - classDef external fill:#fff8e1,stroke:#f57c00,stroke-width:2px,color:#e65100 - - class APP,W userInfra - class API,ENG,DB hatchet - class EXT external -``` - -## Core Components - -### Engine - -The **Hatchet Engine** orchestrates the entire workflow execution process. It determines when and where tasks should run based on complex dependencies, concurrency limits, and worker availability. Key responsibilities include: - -- **Task Scheduling**: Intelligent routing based on worker capacity and constraints -- **Queue Management**: Sophisticated priority, rate limiting, and fairness algorithms -- **Flow Control**: Enforces concurrency limits, rate limits, and routing rules -- **Retry Logic**: Automatic handling of failures, timeouts, and backpressure -- **Cron Processing**: Manages scheduled workflow executions - -Communication with workers are handled through bidirectional gRPC connections that enable real-time task dispatch and status updates with minimal latency and network overhead. The Hatchet engine continuously tracks task execution state and automatically handles retries, timeouts, and failure scenarios without manual intervention. - -The Engine is designed to be horizontally scalable—multiple engine instances can run simultaneously, coordinating through the persistent storage layer to handle increasing workloads seamlessly. - -### API Server - -The **API Server** serves as the primary interface for viewing Hatchet resources. 
It exposes REST endpoints that the Hatchet UI and your applications use to: - -- **Trigger Workflows**: Start new workflow executions with input data -- **Query or Subscribe to Status**: Check workflow and task execution status -- **Manage Resources**: Configure workflows, schedules, and settings -- **Webhook Ingestion**: Receive and process external events - -Security is handled through multi-tenant authentication with API keys and JWT tokens, or webhook signature verification where applicable, to ensure only authentic requests are processed. The API Server also powers Hatchet's web dashboard through REST endpoints, giving you real-time visibility into your workflows. - -### Workers - -**Workers** are your application processes that execute the actual business logic. They establish secure, bidirectional gRPC connections to the Engine and run your functions when tasks are dispatched. Workers continuously report status updates, including task progress, logs, and results, giving you real-time visibility into execution. - -When tasks need to be cancelled, workers handle this gracefully with proper cleanup procedures. One of Hatchet's key design goals is deployment flexibility: workers can run anywhere, from containers to VMs or even your local development machine. This flexibility means you can start development locally, deploy to staging in containers, and run production workloads on dedicated infrastructure without changing your worker code. - -You can run either homogeneous or heterogeneous workers. Homogeneous workers are a single type of worker that is used for all tasks. Heterogeneous workers are a mix of different types of workers that are used for different tasks. - -Heterogeneous workers can also be polyglot, meaning they can run multiple languages. For example, you can run a Python worker, a Go worker, and a TypeScript worker which can all be invoked from the same service application. 
- -### Persistent Storage & Inter-Service Communication - -The platform maintains durable state for all aspects of workflow execution, including task queue state for queued, running, and completed tasks. Workflow definitions with their dependencies, configuration, and metadata are stored persistently, ensuring your orchestration logic survives system restarts. - -In [self-hosted deployments](../self-hosting), this can be a single PostgreSQL database, or for high-throughput workloads you can use RabbitMQ for inter-service communication. In [Hatchet Cloud](https://hatchet.run), this is managed for you with enterprise-grade reliability and performance, handling backups, scaling, and maintenance automatically. - -## Design Philosophy - -Hatchet prioritizes simplicity over complexity: - -- **PostgreSQL foundation** - Built on PostgreSQL with optional RabbitMQ for high-throughput workloads -- **Stateless services** - Engine and API scale horizontally -- **Worker flexibility** - Deploy anywhere, any language (Python/TypeScript/Go), independent scaling - -## Next Steps - -**[Guarantees & Tradeoffs](./guarantees-and-tradeoffs.mdx)** - Learn about Hatchet's guarantees, limitations, and performance characteristics. - -**[Quick Start](./setup.mdx)** - Set up your first Hatchet worker. - -**[Self Hosting](../self-hosting)** - Deploy the Hatchet platform on your own infrastructure. diff --git a/frontend/docs/pages/home/child-spawning.mdx b/frontend/docs/pages/home/child-spawning.mdx deleted file mode 100644 index c47819f97d..0000000000 --- a/frontend/docs/pages/home/child-spawning.mdx +++ /dev/null @@ -1,140 +0,0 @@ -import { Callout, Card, Cards, Steps, Tabs } from "nextra/components"; -import UniversalTabs from "@/components/UniversalTabs"; -import { Snippet } from "@/components/code"; -import { snippets } from "@/lib/generated/snippets"; - -# Procedural Child Task Spawning - -Hatchet supports the dynamic creation of child tasks during a parent task's execution. 
This powerful feature enables: - -- **Complex, reusable task hierarchies** - Break down complex tasks into simpler, reusable components -- **Fan-out parallelism** - Scale out to multiple parallel tasks dynamically -- **Dynamic task behavior** - Create loops and conditional branches at runtime -- **Agent-based tasks** - Support AI agents that can create new tasks based on analysis results or loop until a condition is met - -## Creating Parent and Child Tasks - -To implement child task spawning, you first need to create both parent and child task definitions. - - - - -First, we'll declare a couple of tasks for the parent and child: - - - -We also created a step on the parent task that spawns the child tasks. Now, we'll add a couple of steps to the child task: - - - -And that's it! The fanout parent will run and spawn the child, and then will collect the results from its steps. - - - - - - - - - - - - - - - - - - -## Running Child Tasks - -To spawn and run a child task from a parent task, use the appropriate method for your language: - - - - - - - - - - - - - - - - - - - - - - -## Parallel Child Task Execution - -As shown in the examples above, you can spawn multiple child tasks in parallel: - - - - - - - - - - - - - - - - - - - - - - -## Use Cases for Child Workflows - -Child workflows are ideal for: - -1. **Dynamic fan-out processing** - When the number of parallel tasks is determined at runtime -2. **Reusable workflow components** - Create modular workflows that can be reused across different parent workflows -3. **Resource-intensive operations** - Spread computation across multiple workers -4. **Agent-based systems** - Allow AI agents to spawn new workflows based on their reasoning -5. **Long-running operations** - Break down long operations into smaller, trackable units of work - -## Error Handling with Child Workflows - -When working with child workflows, it's important to properly handle errors. 
Here are patterns for different languages: - - - - - - - - - - - - - - - - - - - - - diff --git a/frontend/docs/pages/home/conditional-workflows.mdx b/frontend/docs/pages/home/conditional-workflows.mdx deleted file mode 100644 index 0b9cfe7987..0000000000 --- a/frontend/docs/pages/home/conditional-workflows.mdx +++ /dev/null @@ -1,225 +0,0 @@ -import { Snippet } from "@/components/code"; -import { snippets } from "@/lib/generated/snippets"; - -import { Callout, Card, Cards, Steps, Tabs } from "nextra/components"; -import UniversalTabs from "@/components/UniversalTabs"; - -## Introduction - -Hatchet V1 introduces the ability to add conditions to tasks in your workflows that determine whether or not a task should be run, based on a number of conditions. Conditions unlock a number of new ways to solve problems with Hatchet, such as: - -1. A workflow that reads a feature flag, and then decides how to progress based on its value. In this case, you'd have two tasks that use parent conditions, where one task runs if the flag value is e.g. `True`, while the other runs if it's `False`. -2. Any type of human-in-the-loop workflow, where you want to wait for a human to e.g. approve something before continuing the dag. - -## Types of Conditions - -There are three types of `Condition`s in Hatchet V1: - -1. Sleep conditions, which sleep for a specified duration before continuing -2. Event conditions, which wait for an event (and optionally a CEL expression evaluated on the payload of that event) before deciding how to continue -3. Parent conditions, which wait for a parent task to complete and then decide how to progress based on its output. - -## Or Groups - -Conditions can also be combined using an `Or` operator into groups of conditions (called "or groups") where at least one must be satisfied in order for the group to evaluate to `True`. An "or group" behaves like a boolean `OR` operator, where the group evaluates to `True` if at least one of its conditions is `True`. 
- -Or groups are an extremely powerful feature because they let you express arbitrarily complex sets of conditions in [conjunctive normal form](https://en.wikipedia.org/wiki/Conjunctive_normal_form) (CNF) for determining when your tasks should run and when they should not. As a simple example, consider the following conditions: - -- **Condition A**: Checking if the output of a parent task is greater than 50 -- **Condition B**: Sleeping for 30 seconds -- **Condition C**: Receiving the `payment:processed` event - -You might want to progress in your workflow if A _or_ B and C. In this case, we can express this set of conditions in CNF as `A or B` AND `A or C` where both `A or B` and `A or C` are or groups. - -## Usage - -Conditions can be used at task _declaration_ time in three ways: - -1. They can be used in a `wait_for` fashion, where a task will wait for the conditions to evaluate to `True` before being run. -2. They can be used in a `skip_if` fashion, where a task will be skipped if the conditions evaluate to `True`. -3. They can be used in a `cancel_if` fashion, where a task will be cancelled if the conditions evaluate to `True`. - -### `wait_for` - -Declaring a task with conditions to `wait_for` will cause the task to wait before starting for until its conditions evaluate to `True`. For instance, if you use `wait_for` with a 60 second sleep, the workflow will wait for 60 seconds before triggering the task. Similar, if the task is waiting for an event, it will wait until the event is fired before continuing. - -### `skip_if` - -Declaring a task with conditions to `skip_if` will cause the task to be skipped if the conditions evaluate to `True`. For instance, if you use a parent condition to check if the output of a parent task is equal to some value, the task will be skipped if that condition evaluates to `True`. - -### `cancel_if` - -Declaring a task with conditions to `cancel_if` will cause the task to be cancelled if the conditions evaluate to `True`. 
For instance, if you use a parent condition to check if the output of a parent task is equal to some value, the task will be cancelled if that condition evaluates to `True`. - - - A task cancelled by a `cancel_if` operator will behave the same as any other - cancellation in Hatchet, meaning that downstream tasks will be cancelled as - well. - - -## Example Workflow - -In this example, we're going to build the following workflow: - -![Branching DAG Workflow](/branching-dag.png) - -Note the branching logic (`left_branch` and `right_branch`), as well as the use of skips and waits. - -To get started, let's declare the workflow. - - - - - - - - - - - - - - - - -Next, we'll start adding tasks to our workflow. First, we'll add a basic task that outputs a random number: - - - - - - - - - - - - - - - - -Next, we'll add a task to the workflow that's a child of the first task, but it has a `wait_for` condition that sleeps for 10 seconds. - - - - - - - - - - - - - - - - -This task will first wait for the parent task to complete, and then it'll sleep for 10 seconds before executing and returning another random number. - -Next, we'll add a task that will be skipped on an event: - - - - - - - - - - - - - - - - -In this case, our task will wait for a 30 second sleep, and then it will be skipped if the `skip_on_event:skip` is fired. - -Next, let's add some branching logic. Here we'll add two more tasks, a left and right branch. - - - - - - - - - - - - - - - - -These two tasks use the `ParentCondition` and `skip_if` together to check if the output of an upstream task was greater or less than `50`, respectively. Only one of the two tasks will run: whichever one's condition evaluates to `True`. - -Next, we'll add a task that waits for an event: - - - - - - - - - - - - - - - - -And finally, we'll add the last task, which collects all of its parents and sums them up. - - - - - -Note that in this task, we rely on `ctx.was_skipped` to determine if a task was skipped. 
- - - - - - - - - - - - - - -This workflow demonstrates the power of the new conditional logic in Hatchet V1. You can now create complex workflows that are much more dynamic than workflows in the previous version of Hatchet, and do all of it declaratively (rather than, for example, by dynamically spawning child workflows based on conditions in the parent). diff --git a/frontend/docs/pages/home/durable-best-practices.mdx b/frontend/docs/pages/home/durable-best-practices.mdx deleted file mode 100644 index d4f064b065..0000000000 --- a/frontend/docs/pages/home/durable-best-practices.mdx +++ /dev/null @@ -1,19 +0,0 @@ -## Durable Execution Best Practices - -Durable tasks require a bit of extra work to ensure that they are not misused. An important concept in running a durable task is that the task should be **deterministic**. This means that the task should always perform the same sequence of operations in between retries. - -The deterministic nature of durable tasks is what allows Hatchet to replay the task from the last checkpoint. If a task is not deterministic, it may produce different results on each retry, which can lead to unexpected behavior. - -## Maintaining Determinism - -By following a few simple rules, you can ensure that your durable tasks are deterministic: - -1. **Only call methods available on the `DurableContext`**: a very common way to introduce non-determinism is to call methods within your application code which produces side effects. If you need to call a method in your application code which fetches data from a database, calls any sort of i/o operation, or otherwise interacts with other systems, you should spawn those tasks as a **child task** or **child workflow** using `RunChild`. - -2. **When updating durable tasks, always guarantee backwards compatibility**: if you change the order of operations in a durable task, you may break determinism. 
For example, if you call `SleepFor` followed by `WaitFor`, and then change the order of those calls, Hatchet will not be able to replay the task correctly. This is because the task may have already been checkpointed at the first call to `SleepFor`, and if you change the order of operations, the checkpoint is meaningless. - -## Using DAGs instead of durable tasks - -[DAGs](./dags) are generally a much easier, more intuitive way to run a durable, deterministic workflow. DAGs are inherently deterministic, as their shape of work is predefined, and they cache intermediate results. If you are running simple workflows that can be represented as a DAG, you should use DAGs instead of durable tasks. DAGs also have conditional execution primitives which match the behavior of `SleepFor` and `WaitFor` in durable tasks. - -Durable tasks are useful if you need to run a workflow that is not easily represented as a DAG. diff --git a/frontend/docs/pages/home/durable-events.mdx b/frontend/docs/pages/home/durable-events.mdx deleted file mode 100644 index 6c49b830ec..0000000000 --- a/frontend/docs/pages/home/durable-events.mdx +++ /dev/null @@ -1,60 +0,0 @@ -import { snippets } from "@/lib/generated/snippets"; -import { Snippet } from "@/components/code"; -import { Callout, Card, Cards, Steps, Tabs } from "nextra/components"; -import UniversalTabs from "@/components/UniversalTabs"; - -## Durable Events - -Durable events are a feature of **durable tasks** which allow tasks to wait for an event to occur before continuing. This is useful in cases where a task needs to wait for a long time for an external action. Durable events are useful, because even if your task is interrupted and requeued while waiting for an event, the event will still be processed. When the task is resumed, it will read the event from the durable event log and continue processing. 
- -## Declaring durable events - -Durable events are declared using the context method `WaitFor` (or utility method `WaitForEvent`) on the `DurableContext` object. - - - - - - - - - - - - - - - - - - - - - - -## Durable event filters - -Durable events can be filtered using [CEL](https://github.com/google/cel-spec) expressions. For example, to only receive `user:update` events for a specific user, you can use the following filter: - - - - - - - - - - - - - - - - - - - - - diff --git a/frontend/docs/pages/home/durable-execution.mdx b/frontend/docs/pages/home/durable-execution.mdx deleted file mode 100644 index 7c1985935c..0000000000 --- a/frontend/docs/pages/home/durable-execution.mdx +++ /dev/null @@ -1,60 +0,0 @@ -import { snippets } from "@/lib/generated/snippets"; -import { Snippet } from "@/components/code"; - -import { Callout } from "nextra/components"; - -# Durable Execution - -## Introduction - -**Durable execution** refers to the ability of a function to easily recover from failures or interruptions. In Hatchet, we refer to a function with this ability as a **durable task**. Durable tasks are essentially tasks that store intermediate results in a durable event log - in other words, they're a fancy cache. - - - For an in-depth look at how durable execution works, have a look at [this blog - post](https://hatchet.run/blog/durable-execution). - - -This is especially useful in cases such as: - -1. Tasks which need to always run to completion, even if the underlying machine crashes or the task is interrupted. -2. Situations where a task needs to wait for an very long amount of time for something to complete before continuing. Running a durable task will not take up a slot on the main worker, so is a strong candidate for e.g. fanout tasks that spawn a large number of children and then wait for their results. -3. Waiting for a potentially long time for an event, such as human-in-the-loop tasks where we might not get human feedback for hours or days. 
- -## How Hatchet Runs Durable Tasks - -Durable tasks run on the same worker process as regular tasks, but they consume a separate slot type so they do not compete with regular tasks for slots. This pattern prevents deadlock scenarios where durable tasks would starve children tasks for slots which are needed for the parent durable task to complete. - -Tasks that are declared as being durable (using `durable_task` instead of `task`), will receive a `DurableContext` object instead of a normal `Context,` which extends the `Context` by providing some additional tools for working with durable execution features. - -## Example Task - -Now that we know a bit about how Hatchet handles durable execution, let's build a task. We'll start by declaring a task that will run durably. - - - -Here, we've declared a Hatchet task just like any other. Now, we can add some tasks to it: - - - -We've added two tasks to our workflow. The first is a normal, "ephemeral" task, which does not leverage any of Hatchet's durable features. - -Second, we've added a durable task, which we've created by using the `durable_task` method of the `Workflow`, as opposed to the `task` method. - - - Note that the `durable_task` we've defined takes a `DurableContext`, as - opposed to a regular `Context`, as its second argument. The `DurableContext` - is a subclass of the regular `Context` that adds some additional methods for - working with durable tasks. - - -The durable task first waits for a sleep condition. Once the sleep has completed, it continues processing until it hits the second `wait_for`. At this point, it needs to wait for an event condition. Once it receives the event, the task prints `Event received` and completes. - -If this task is interrupted at any time, it will continue from where it left off. But more importantly, if an event comes into the system while the task is waiting, the task will immediately process the event. 
And if the task is interrupted while in a sleeping state, it will respect the original sleep duration on restart -- that is, if the task calls `ctx.aio_sleep_for` for 24 hours and is interrupted after 23 hours, it will only sleep for 1 more hour on restart. - -### Or Groups - -Similarly to in [conditional workflows](./conditional-workflows.mdx#or-groups), durable tasks can also use or groups in the wait conditions they use. For example, you could wait for either an event or a sleep (whichever comes first) like this: - - diff --git a/frontend/docs/pages/home/durable-sleep.mdx b/frontend/docs/pages/home/durable-sleep.mdx deleted file mode 100644 index 70abcf789e..0000000000 --- a/frontend/docs/pages/home/durable-sleep.mdx +++ /dev/null @@ -1,35 +0,0 @@ -import { snippets } from "@/lib/generated/snippets"; -import { Snippet } from "@/components/code"; -import { Callout, Card, Cards, Steps, Tabs } from "nextra/components"; -import UniversalTabs from "@/components/UniversalTabs"; - -## Durable Sleep - -Durable sleep is a feature of **durable tasks** which allow tasks to pause execution for a specified amount of time. Instead of a regular `sleep` call in your task, durable sleep is guaranteed to only sleep for the specified amount of time after the first time it was called. - -For example, say you'd like to send a notification to a user after 24 hours. With a regular `sleep`, if the task is interrupted after 23 hours, it will restart and call `sleep` for 24 hours again. This means that the task will sleep for 47 hours in total, which is not what you want. With durable sleep, the task will respect the original sleep duration on restart -- that is, if the task calls `ctx.aio_sleep_for` for 24 hours and is interrupted after 23 hours, it will only sleep for 1 more hour on restart. - -## Using durable sleep - -Durable sleep can be used by calling the `SleepFor` method on the `DurableContext` object. This method takes a duration as an argument and will sleep for that duration. 
- - - - - - - - - - - - - - - - - - - - - diff --git a/frontend/docs/pages/home/guarantees-and-tradeoffs.mdx b/frontend/docs/pages/home/guarantees-and-tradeoffs.mdx deleted file mode 100644 index 6664227b24..0000000000 --- a/frontend/docs/pages/home/guarantees-and-tradeoffs.mdx +++ /dev/null @@ -1,135 +0,0 @@ -import { Callout } from "nextra/components"; - -# Guarantees & Tradeoffs - -Hatchet is designed as a modern task orchestration platform that bridges the gap between simple job queues and complex workflow engines. Understanding where it excels—and where it doesn't—will help you determine if it's the right fit for your needs. - -### Good Fit - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
- Real-time Requests - Sub-25ms task dispatch for hot - workers with thousands of concurrent tasks -
- Workflow Orchestration with dependencies and error - handling -
- Reliable Task Processing where durability matters -
- Moderate Throughput (hundreds to low 10,000s of - tasks/second) -
- Multi-Language Workers or polyglot teams -
- Operational Simplicity if your team is already using - PostgreSQL -
- Cloud or Air-Gapped Environments for flexible - deployment options ( - Hatchet Cloud and{" "} - self-hosting) -
- -### Not a Good Fit - - - - - - - - - - - - - - - - - - - - -
- Extremely High Throughput (consistently 10,000+ - tasks/second) -
- Sub-Millisecond Latency requirements -
- Memory-Only Queuing where persistence or durability - isn't needed -
- Serverless Environments on cloud providers like AWS - Lambda, Google Cloud Functions, or Azure Functions -
- -## Core Reliability Guarantees - -Hatchet is designed with the following core reliability guarantees: - -**Every task will execute at least once.** Hatchet ensures that no task gets lost, even during system failures, network outages, or deployments. Failed tasks automatically retry according to your configuration, and all tasks persist through restarts and network issues. - -**Consistent state management.** All workflow state changes happen within PostgreSQL transactions, ensuring that your workflow dependencies resolve consistently and no tasks are lost during failures or deployments. - -**Predictable execution order.** The default task assignment strategy is First In First Out (FIFO) which can be modified with [concurrency policies](./concurrency.mdx), [rate limits](./rate-limits.mdx), and [priorities](./priority.mdx). - -**Operational resilience.** The engine and API servers are stateless, allowing them to restart without losing state and enabling horizontal scaling by simply adding more instances. Workers automatically reconnect after network issues and can be deployed anywhere—containers, VMs, or local development environments. - -## Performance Expectations - -Understanding Hatchet's performance characteristics helps you plan your implementation and set realistic expectations. - -**Typical time-to-start latency** for task dispatch is sub 50ms with PostgreSQL storage, though this can be optimized to ~25ms P95 for hot workers in optimized setups. Network latency between your workers and the Hatchet engine will directly impact dispatch times, so consider deployment topology when latency matters. - -**Throughput capacity** varies significantly based on your setup. A single engine instance with PostgreSQL-only storage typically handles hundreds of tasks per second. When you need higher throughput, adding RabbitMQ as a message queue can substantially increase capacity, though your database will eventually become the bottleneck at very high scales. 
Through tuning and sharding, we can support throughputs of tens of thousands of tasks per second. - -**Concurrent processing** scales well — Hatchet supports thousands of concurrent workers, with worker-level concurrency controlled through slot configuration. The depth of your queues is limited by your database storage capacity rather than memory constraints. - -**Performance optimization** comes through several strategies: RabbitMQ for high-throughput workloads, read replicas for analytics queries, connection pooling with tools like PgBouncer, and shorter retention periods for execution history. Conversely, performance can be limited by database connection limits, large task payloads (over 1MB), complex dependency graphs, and cross-region network latency. - - - -**Not seeing expected performance?** - -If you're not seeing the performance you expect, please [reach out to us](https://hatchet.run/office-hours) or [join our community](https://hatchet.run/discord) to explore tuning options. - - - -## Ready to Get Started? - -Now that you understand Hatchet's capabilities and limitations, explore the technical details: - -**[Quick Start](./setup.mdx)** - Set up your first Hatchet worker. - -**[Self-Hosting](../self-hosting)** - Learn how to deploy Hatchet on your own infrastructure with appropriate sizing for your needs. diff --git a/frontend/docs/pages/home/on-failure-tasks.mdx b/frontend/docs/pages/home/on-failure-tasks.mdx deleted file mode 100644 index c3cd00d4e7..0000000000 --- a/frontend/docs/pages/home/on-failure-tasks.mdx +++ /dev/null @@ -1,43 +0,0 @@ -import { snippets } from "@/lib/generated/snippets"; -import { Snippet } from "@/components/code"; -import { Callout, Card, Cards, Steps, Tabs } from "nextra/components"; -import UniversalTabs from "@/components/UniversalTabs"; - -# On-Failure Tasks - -The on-failure task is a special type of task in Hatchet that allows you to define a function to be executed in the event that any task in the main task fails. 
This feature enables you to handle errors, perform cleanup tasks, or trigger notifications in case of task failure within a workflow. - -## Defining an on-failure task - -You can define an on-failure task on your task the same as you'd define any other task: - - - - - - Note: Only one on-failure task can be defined per workflow. - - - - - - - - - - - - - -In the examples above, the on-failure task will be executed only if any of the main tasks in the workflow fail. - -## Use Cases - -Some common use cases for the on-failure task include: - -- Performing cleanup tasks after a task failure in a workflow -- Sending notifications or alerts about the failure -- Logging additional information for debugging purposes -- Triggering a compensating action or a fallback task - -By utilizing the on-failure task, you can handle workflow failures gracefully and ensure that necessary actions are taken in case of errors. diff --git a/frontend/docs/pages/home/orchestration.mdx b/frontend/docs/pages/home/orchestration.mdx deleted file mode 100644 index 85ecda61b1..0000000000 --- a/frontend/docs/pages/home/orchestration.mdx +++ /dev/null @@ -1,14 +0,0 @@ -# Task Orchestration - -Not only can you run a single task in Hatchet, but you can also orchestrate multiple tasks together based on a shape that you define. For example, you can run a task that depends on the output of another task, or you can run a task that waits for a certain condition to be met before running. - -1. [Declarative Workflow Design (DAGs)](./dags.mdx) -- which is a way to declaratively define the sequence and dependencies of tasks in a workflow when you know the dependencies ahead of time. -2. [Procedural Child Spawning](./child-spawning.mdx) -- which is a way to orchestrate tasks in a workflow when you don't know the dependencies ahead of time or when the dependencies are dynamic. 
- -## Flow Controls - -In addition to coordinating the execution of tasks, Hatchet also provides a set of flow control primitives that allow you to orchestrate tasks in a workflow. This allows you to run only what your service can handle at any given time. - -1. [Worker Slots](./workers.mdx#understanding-slots) -- which is a way to control the number of tasks that can be executed concurrently on a given compute process. -2. [Concurrency Control](./concurrency.mdx) -- which is a global way to control the concurrent execution of tasks based on a specific key. -3. [Rate Limiting](./rate-limits.mdx) -- which is a global way to control the rate of task execution based on time period. diff --git a/frontend/docs/pages/home/running-tasks.mdx b/frontend/docs/pages/home/running-tasks.mdx deleted file mode 100644 index cba352bdf0..0000000000 --- a/frontend/docs/pages/home/running-tasks.mdx +++ /dev/null @@ -1,18 +0,0 @@ -import { Callout, Tabs } from "nextra/components"; -import UniversalTabs from "@/components/UniversalTabs"; -import { Snippet } from "@/components/code"; -import { snippets } from "@/lib/generated/snippets"; - -# Running Tasks - -Once you have a running worker, you'll want to run your tasks. Hatchet provides a number of ways of triggering task runs, from which you should select the one(s) that best suit(s) your use case. - -1. Tasks can be [run, and have their results waited on](./run-with-results.mdx) -2. Tasks can be [enqueued without waiting for their results ("fire and forget")](./run-no-wait.mdx). -3. Tasks can be run on [cron schedules](./cron-runs.mdx). -4. Tasks can be [triggered by events](./run-on-event.mdx). -5. Tasks can be [scheduled for a later time](./scheduled-runs.mdx). - -Each of these methods for triggering tasks have their own uses in different scenarios, and the next few sections will give some examples of each. - -These methods can be invoked directly from the workflow definition, or from other services. 
diff --git a/frontend/docs/pages/home/running-your-task.mdx b/frontend/docs/pages/home/running-your-task.mdx deleted file mode 100644 index b89d6cc305..0000000000 --- a/frontend/docs/pages/home/running-your-task.mdx +++ /dev/null @@ -1,32 +0,0 @@ -import { snippets } from "@/lib/generated/snippets"; -import { Snippet } from "@/components/code"; -import { Callout, Card, Cards, Steps, Tabs } from "nextra/components"; -import UniversalTabs from "@/components/UniversalTabs"; - -# Running Your First Task - -With your task defined, you can import it wherever you need to use it and invoke it with the `run` method. - - - - - - - - - - - - - - - - - - -There are many ways to run a task, including: - -- [Running a task with results](./run-with-results.mdx) -- [Enqueuing a task](./run-no-wait.mdx) -- [Scheduling a task](./scheduled-runs.mdx) -- [Scheduling a task with a cron schedule](./cron-runs.mdx) diff --git a/frontend/docs/pages/home/setup.mdx b/frontend/docs/pages/home/setup.mdx deleted file mode 100644 index ab6973c960..0000000000 --- a/frontend/docs/pages/home/setup.mdx +++ /dev/null @@ -1,31 +0,0 @@ -import Tabs from "../_setup/tabs.mdx"; -import { Callout } from "nextra/components"; - -# Advanced Setup - - - This guide is intended for users who want to explore Hatchet in more depth - beyond the quickstart. If you haven't already set up Hatchet, please see the - [Hatchet Cloud Quickstart](./hatchet-cloud-quickstart) or the [Self-Hosting - Quickstart](../../self-hosting/) first. - - -## Set environment variables - -All Hatchet SDKs require the `HATCHET_CLIENT_TOKEN` environment variable to be set. This token is automatically created when you run a CLI command like `hatchet worker dev` or `hatchet trigger`, but if you're setting up a project manually, you'll need to set this variable yourself. You can generate an API token from the Hatchet frontend by navigating to the `Settings` tab and clicking on the `API Tokens` tab. 
Click the `Generate API Token` button to create a new token. Set this environment variable in your project, and **do not share it publicly**. - -```bash copy -export HATCHET_CLIENT_TOKEN="" -``` - -Additionally, if you are a self-hosted user provisioning without TLS enabled, you will need to set the `HATCHET_CLIENT_TLS_STRATEGY` environment variable to `none`. If you are on Hatchet Cloud, TLS is enabled by default, so this is not required. - -```bash copy -export HATCHET_CLIENT_TLS_STRATEGY=none -``` - -## Setup your codebase - - - -Continue to the next section to learn how to [create your first task](./your-first-task) diff --git a/frontend/docs/pages/home/troubleshooting-workers.mdx b/frontend/docs/pages/home/troubleshooting-workers.mdx deleted file mode 100644 index b46cea79b7..0000000000 --- a/frontend/docs/pages/home/troubleshooting-workers.mdx +++ /dev/null @@ -1,29 +0,0 @@ -import { Tabs, Callout } from "nextra/components"; - -# Troubleshooting Hatchet Workers - -This guide aims to document common issues when deploying Hatchet workers. - -## Could not send task to worker - -If you see this error in the event history of a task, it could mean several things: - -1. The worker is closing its network connection while the task is being sent. This could be caused by the worker crashing or going offline. - -2. The payload is too large for the worker to accept or the Hatchet engine to send. The default maximum payload size is 4MB. Consider reducing the size of the input data or output data of your tasks. - -3. The worker has a large backlog of tasks in-flight on the network connection and is rejecting new tasks. This can occur if workers are geographically distant from the Hatchet engine or if there are network issues causing delays. Hatchet Cloud runs by default in `us-west-2` (Oregon, USA), so consider deploying your workers in a region close to that for the best performance. 
- - If you are self-hosting, you can increase the maximum backlog size via the `SERVER_GRPC_WORKER_STREAM_MAX_BACKLOG_SIZE` environment variable in your Hatchet engine configuration. The default is 20. - -## No workers visible in dashboard - -If you have deployed workers but they are not visible in the Hatchet dashboard, it is likely that: - -1. Your API token is invalid or incorrect. Ensure that the token you are using to start the worker matches the token generated in the Hatchet dashboard for your tenant. - -2. Worker heartbeats are not reaching the Hatchet engine. You will see noisy logs in the worker output if this is the case. - -## Phantom workers active in dashboard - -This is often due to workers still running in your deployed environment. We see this most often with very long termination periods for workers, or in local development environments where worker processes are leaking. If you are in a local development environment, you can usually view running Hatchet worker processes via `ps -a | grep worker` (or whatever your entrypoint binary is called) and kill them manually. diff --git a/frontend/docs/pages/home/workers.mdx b/frontend/docs/pages/home/workers.mdx deleted file mode 100644 index 528547234b..0000000000 --- a/frontend/docs/pages/home/workers.mdx +++ /dev/null @@ -1,170 +0,0 @@ -import { snippets } from "@/lib/generated/snippets"; -import { Snippet } from "@/components/code"; -import { Callout, Card, Cards, Steps, Tabs } from "nextra/components"; -import UniversalTabs from "@/components/UniversalTabs"; - -# Workers - -Workers are the backbone of Hatchet, responsible for executing the individual tasks. They operate across different nodes in your infrastructure, allowing for distributed and scalable task execution. - -## How Workers Operate - -In Hatchet, workers are long-running processes that wait for instructions from the Hatchet engine to execute specific steps. 
They communicate with the Hatchet engine to receive tasks, execute them, and report back the results. - -## Declaring a Worker - -Now that we have a [task declared](./your-first-task.mdx) we can create a worker that can execute the task. - -Declare a worker by calling the `worker` method on the Hatchet client. The `worker` method takes a name and an optional configuration object. - - - - - - - If you are using Windows, attempting to run a worker will result in an error: - - ``` - AttributeError: module 'signal' has no attribute 'SIGQUIT' - ``` - - However you can use the [Windows Subsystem for Linux (WSL)](https://learn.microsoft.com/en-us/windows/wsl/install) to run your workers. After - you install your Python environment (e.g. via `uv` or `poetry`) in WSL, you can then - run your workers inside that environment. You can still run client code (e.g. to - trigger task runs or query the API) in your native Windows environment, but your - workers have to be run in WSL. - - Another option is to run workers in Docker containers. - - - - - ### Register the Worker - - - - ### Add an Entrypoint Script - - Add a script to your `package.json` to start the worker (changing the file path to the location of your worker file): - - ```json - "scripts": { - "start:worker": "ts-node src/v1/examples/simple/worker.ts" - } - ``` - - ### Run the Worker - - Start the worker by running the script you just added to your `package.json`: - - - ```bash - npm run start:worker - ``` - - - ```bash - pnpm run start:worker - ``` - - - ```bash - yarn start:worker - ``` - - - - - - - - Then start the worker by running: - ```bash - go run main.go - ``` - - - Note there are both `worker.Start` and `worker.StartBlocking` methods. The `StartBlocking` method will block the main thread until the worker is stopped, while the `Start` method will return immediately and you'll need to call `worker.Stop` to stop the worker. 
- - - - - ### Add the Hatchet SDK to your Gemfile - - ```ruby - gem "hatchet-sdk" - ``` - - Then install with: - - ```bash - bundle install - ``` - - ### Register the Worker - - - - ### Run the Worker - - Start the worker by running: - - ```bash - bundle exec ruby worker.rb - ``` - - - - -And that's it! Once you run your script to start the worker, you'll see some logs like this, which tell you that your worker is running. - - - For self-hosted users, you may need to set other gRPC configuration options to - ensure your worker can connect to the Hatchet engine. See the - [Self-Hosting](../self-hosting/worker-configuration-options.mdx) docs for more - information. - - -``` -[DEBUG] 🪓 -- 2025-03-24 15:11:32,755 - creating new event loop -[INFO] 🪓 -- 2025-03-24 15:11:32,755 - ------------------------------------------ -[INFO] 🪓 -- 2025-03-24 15:11:32,755 - STARTING HATCHET... -[DEBUG] 🪓 -- 2025-03-24 15:11:32,755 - worker runtime starting on PID: 26406 -[DEBUG] 🪓 -- 2025-03-24 15:11:32,758 - action listener starting on PID: 26434 -[INFO] 🪓 -- 2025-03-24 15:11:32,760 - starting runner... -[DEBUG] 🪓 -- 2025-03-24 15:11:32,761 - starting action listener health check... -[DEBUG] 🪓 -- 2025-03-24 15:11:32,764 - 'test-worker' waiting for ['simpletask:step1'] -[DEBUG] 🪓 -- 2025-03-24 15:11:33,413 - starting action listener: test-worker -[DEBUG] 🪓 -- 2025-03-24 15:11:33,542 - acquired action listener: efc4aaf2-be4a-4964-a578-db6465f9297e -[DEBUG] 🪓 -- 2025-03-24 15:11:33,542 - sending heartbeat -[DEBUG] 🪓 -- 2025-03-24 15:11:37,658 - sending heartbeat -``` - - - Note that many of these logs are `debug` logs, which only are shown if the - `debug` option on the Hatchet client is set to `True` - - -## Understanding Slots - -Slots are the number of concurrent _task_ runs that a worker can execute, are are configured using the `slots` option on the worker. 
For instance, if you set `slots=5` on your worker, then your worker will be able to run five tasks concurrently before new tasks start needing to wait in the queue before being picked up. Increasing the number of `slots` on your worker, or the number of workers you run, will allow you to handle more concurrent work (and thus more throughput, in many cases). - -An important caveat is that slot-level concurrency is only helpful up to the point where the worker is not bottlenecked by another resource, such as CPU, memory, or network bandwidth. If your worker is bottlenecked by one of these resources, increasing the number of slots will not improve throughput. - -## Best Practices for Managing Workers - -To ensure a robust and efficient Hatchet implementation, consider the following best practices when managing your workers: - -1. **Reliability**: Deploy workers in a stable environment with sufficient resources to avoid resource contention and ensure reliable execution. - -2. **Monitoring and Logging**: Implement robust monitoring and logging mechanisms to track worker health, performance, and task execution status. - -3. **Error Handling**: Design workers to handle errors gracefully, report execution failures to Hatchet, and retry tasks based on configured policies. - -4. **Secure Communication**: Ensure secure communication between workers and the Hatchet engine, especially when distributed across different networks. - -5. **Lifecycle Management**: Implement proper lifecycle management for workers, including automatic restarts on critical failures and graceful shutdown procedures. - -6. **Scalability**: Plan for scalability by designing your system to easily add or remove workers based on demand, leveraging containerization, orchestration tools, or cloud auto-scaling features. - -7. **Consistent Updates**: Keep worker implementations up to date with the latest Hatchet SDKs and ensure compatibility with the Hatchet engine version. 
diff --git a/frontend/docs/pages/home/your-first-task.mdx b/frontend/docs/pages/home/your-first-task.mdx deleted file mode 100644 index 0b3dd3bcac..0000000000 --- a/frontend/docs/pages/home/your-first-task.mdx +++ /dev/null @@ -1,69 +0,0 @@ -import { snippets } from "@/lib/generated/snippets"; -import { Snippet } from "@/components/code"; -import { Callout, Card, Cards, Steps, Tabs } from "nextra/components"; -import UniversalTabs from "@/components/UniversalTabs"; - -# Declaring Your First Task - -In Hatchet, the fundamental unit of invocable work is a [Task](#defining-a-task). Each task is an atomic function. - -As we continue to build with Hatchet, we'll add additional configuration options to compose tasks into [DAG workflows](./dags.mdx) or [procedural child spawning](./child-spawning.mdx). - -## Defining a Task - -Start by declaring a task with a name. The task object can declare additional task-level configuration options which we'll cover later. - -The returned object is an instance of the `Task` class, which is the primary interface for interacting with the task (i.e. [running](./run-with-results.mdx), [enqueuing](./run-no-wait.mdx), [scheduling](./scheduled-runs.mdx), etc). - - - - - - - - - - - - - - - - - -## Running a Task - -With your task defined, you can import it wherever you need to use it and invoke it with the `run` method. - - - NOTE: You must first [register the task on a worker](./workers.mdx) before you - can run it. Calling `your_task.run` will enqueue a task to be executed by a - worker but it will wait indefinitely for the task to be executed. 
- - - - - - - - - - - - - - - - - - - -There are many ways to run a task, including: - -- [Running a task with results](./run-with-results.mdx) -- [Enqueuing a task](./run-no-wait.mdx) -- [Scheduling a task](./scheduled-runs.mdx) -- [Scheduling a task with a cron schedule](./cron-runs.mdx) -- [Event-driven task execution](./run-on-event.mdx) - -Now that you have defined a complete task, you can move on to [creating a worker to execute the task](./workers.mdx). diff --git a/frontend/docs/pages/sdks/_meta.js b/frontend/docs/pages/reference/_meta.js similarity index 60% rename from frontend/docs/pages/sdks/_meta.js rename to frontend/docs/pages/reference/_meta.js index 7d173b8f62..106a043d33 100644 --- a/frontend/docs/pages/sdks/_meta.js +++ b/frontend/docs/pages/reference/_meta.js @@ -7,7 +7,14 @@ export default { }, }, typescript: { - title: "TypeScript SDK", + title: "TypeScript SDK", type: "page", theme: { toc: true, }, }, + cli: { + title: "CLI Reference", type: "page", theme: { toc: true, }, }, diff --git a/frontend/docs/pages/cli/_meta.js b/frontend/docs/pages/reference/cli/_meta.js similarity index 100% rename from frontend/docs/pages/cli/_meta.js rename to frontend/docs/pages/reference/cli/_meta.js diff --git a/frontend/docs/pages/cli/index.mdx b/frontend/docs/pages/reference/cli/index.mdx similarity index 100% rename from frontend/docs/pages/cli/index.mdx rename to frontend/docs/pages/reference/cli/index.mdx diff --git a/frontend/docs/pages/cli/profiles.mdx b/frontend/docs/pages/reference/cli/profiles.mdx similarity index 100% rename from frontend/docs/pages/cli/profiles.mdx rename to frontend/docs/pages/reference/cli/profiles.mdx diff --git a/frontend/docs/pages/cli/running-hatchet-locally.mdx b/frontend/docs/pages/reference/cli/running-hatchet-locally.mdx similarity index 100% rename from frontend/docs/pages/cli/running-hatchet-locally.mdx rename to frontend/docs/pages/reference/cli/running-hatchet-locally.mdx diff --git
a/frontend/docs/pages/cli/running-workers-locally.mdx b/frontend/docs/pages/reference/cli/running-workers-locally.mdx similarity index 100% rename from frontend/docs/pages/cli/running-workers-locally.mdx rename to frontend/docs/pages/reference/cli/running-workers-locally.mdx diff --git a/frontend/docs/pages/cli/triggering-workflows.mdx b/frontend/docs/pages/reference/cli/triggering-workflows.mdx similarity index 100% rename from frontend/docs/pages/cli/triggering-workflows.mdx rename to frontend/docs/pages/reference/cli/triggering-workflows.mdx diff --git a/frontend/docs/pages/cli/tui.mdx b/frontend/docs/pages/reference/cli/tui.mdx similarity index 100% rename from frontend/docs/pages/cli/tui.mdx rename to frontend/docs/pages/reference/cli/tui.mdx diff --git a/frontend/docs/pages/reference/python/_meta.js b/frontend/docs/pages/reference/python/_meta.js new file mode 100644 index 0000000000..c93c38e007 --- /dev/null +++ b/frontend/docs/pages/reference/python/_meta.js @@ -0,0 +1,63 @@ +export default { + client: { + title: "Client", + theme: { + toc: true, + }, + }, + + context: { + title: "Context", + theme: { + toc: true, + }, + }, + + "feature-clients": { + title: "Feature Clients", + theme: { + toc: true, + }, + }, + + runnables: { + title: "Runnables", + theme: { + toc: true, + }, + }, + "--python-specifics": { + title: "Python Specifics", + type: "separator", + }, + asyncio: { + title: "Asyncio", + theme: { + toc: true, + }, + }, + pydantic: { + title: "Pydantic", + theme: { + toc: true, + }, + }, + lifespans: { + title: "Lifespans", + theme: { + toc: true, + }, + }, + "dependency-injection": { + title: "Dependency Injection", + theme: { + toc: true, + }, + }, + dataclasses: { + title: "Dataclass Support", + theme: { + toc: true, + }, + }, +}; diff --git a/frontend/docs/pages/home/asyncio.mdx b/frontend/docs/pages/reference/python/asyncio.mdx similarity index 100% rename from frontend/docs/pages/home/asyncio.mdx rename to 
frontend/docs/pages/reference/python/asyncio.mdx diff --git a/frontend/docs/pages/sdks/python/client.mdx b/frontend/docs/pages/reference/python/client.mdx similarity index 100% rename from frontend/docs/pages/sdks/python/client.mdx rename to frontend/docs/pages/reference/python/client.mdx diff --git a/frontend/docs/pages/sdks/python/context.mdx b/frontend/docs/pages/reference/python/context.mdx similarity index 98% rename from frontend/docs/pages/sdks/python/context.mdx rename to frontend/docs/pages/reference/python/context.mdx index dd45e68658..ada9c8cd07 100644 --- a/frontend/docs/pages/sdks/python/context.mdx +++ b/frontend/docs/pages/reference/python/context.mdx @@ -13,7 +13,7 @@ There are two types of context classes you'll encounter: | Name | Description | | ---------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | -| `was_skipped` | Check if a given task was skipped. You can read about skipping in [the docs](../../home/conditional-workflows#skip_if). | +| `was_skipped` | Check if a given task was skipped. You can read about skipping in [the docs](/v1/durable-workflows/conditions#checking-if-a-task-was-skipped). | | `task_output` | Get the output of a parent task in a DAG. | | `cancel` | Cancel the current task run. This will call the Hatchet API to cancel the step run and set the exit flag to True. | | `aio_cancel` | Cancel the current task run. This will call the Hatchet API to cancel the step run and set the exit flag to True. | @@ -146,7 +146,7 @@ Returns: #### `was_skipped` -Check if a given task was skipped. You can read about skipping in [the docs](../../home/conditional-workflows#skip_if). +Check if a given task was skipped. You can read about skipping in [the docs](/v1/durable-workflows/conditions#checking-if-a-task-was-skipped). 
Parameters: diff --git a/frontend/docs/pages/home/dataclasses.mdx b/frontend/docs/pages/reference/python/dataclasses.mdx similarity index 100% rename from frontend/docs/pages/home/dataclasses.mdx rename to frontend/docs/pages/reference/python/dataclasses.mdx diff --git a/frontend/docs/pages/reference/python/dependency-injection.mdx b/frontend/docs/pages/reference/python/dependency-injection.mdx new file mode 100644 index 0000000000..83d6dd43e3 --- /dev/null +++ b/frontend/docs/pages/reference/python/dependency-injection.mdx @@ -0,0 +1,43 @@ +import { snippets } from "@/lib/generated/snippets"; +import { Snippet } from "@/components/code"; +import { Callout, Card, Cards, Steps, Tabs } from "nextra/components"; +import UniversalTabs from "@/components/UniversalTabs"; + +# Dependency Injection + + + Dependency injection is an **experimental feature** in Hatchet, and is subject + to change. + + +Hatchet's Python SDK allows you to inject **_dependencies_** into your tasks, FastAPI style. These dependencies can be either synchronous or asynchronous functions. They are executed before the task is triggered, and their results are injected into the task as parameters. + +This behaves almost identically to [FastAPI's dependency injection](https://fastapi.tiangolo.com/tutorial/dependencies/), and is intended to be used in the same way. Dependencies are useful for sharing logic between tasks that you'd like to avoid repeating, or would like to factor out of the task logic itself (e.g. to make testing easier). + + +Since dependencies are run before tasks are executed, having many dependencies (or any that take a long time to evaluate) can cause tasks to experience significantly delayed start times, as they must wait for all dependencies to finish evaluating. + + + +## Usage + +To add dependencies to your tasks, import `Depends` from the `hatchet_sdk`. Then: + + + +In this example, we've declared two dependencies: one synchronous and one asynchronous. 
You can do anything you like in your dependencies, such as creating database sessions, managing configuration, sharing instances of service-layer logic, and more. + +Once you've defined your dependency functions, inject them into your tasks as follows: + + + + + Important note: Your dependency functions must take two positional arguments: + the workflow input and the `Context` (the same as any other task). + + +That's it! Now, whenever your task is triggered, its dependencies will be evaluated, and the results will be injected into the task at runtime for you to use as needed. diff --git a/frontend/docs/pages/sdks/python/feature-clients/_meta.js b/frontend/docs/pages/reference/python/feature-clients/_meta.js similarity index 100% rename from frontend/docs/pages/sdks/python/feature-clients/_meta.js rename to frontend/docs/pages/reference/python/feature-clients/_meta.js diff --git a/frontend/docs/pages/sdks/python/feature-clients/cron.mdx b/frontend/docs/pages/reference/python/feature-clients/cron.mdx similarity index 100% rename from frontend/docs/pages/sdks/python/feature-clients/cron.mdx rename to frontend/docs/pages/reference/python/feature-clients/cron.mdx diff --git a/frontend/docs/pages/sdks/python/feature-clients/filters.mdx b/frontend/docs/pages/reference/python/feature-clients/filters.mdx similarity index 100% rename from frontend/docs/pages/sdks/python/feature-clients/filters.mdx rename to frontend/docs/pages/reference/python/feature-clients/filters.mdx diff --git a/frontend/docs/pages/sdks/python/feature-clients/logs.mdx b/frontend/docs/pages/reference/python/feature-clients/logs.mdx similarity index 100% rename from frontend/docs/pages/sdks/python/feature-clients/logs.mdx rename to frontend/docs/pages/reference/python/feature-clients/logs.mdx diff --git a/frontend/docs/pages/sdks/python/feature-clients/metrics.mdx b/frontend/docs/pages/reference/python/feature-clients/metrics.mdx similarity index 100% rename from 
frontend/docs/pages/sdks/python/feature-clients/metrics.mdx rename to frontend/docs/pages/reference/python/feature-clients/metrics.mdx diff --git a/frontend/docs/pages/sdks/python/feature-clients/rate_limits.mdx b/frontend/docs/pages/reference/python/feature-clients/rate_limits.mdx similarity index 100% rename from frontend/docs/pages/sdks/python/feature-clients/rate_limits.mdx rename to frontend/docs/pages/reference/python/feature-clients/rate_limits.mdx diff --git a/frontend/docs/pages/sdks/python/feature-clients/runs.mdx b/frontend/docs/pages/reference/python/feature-clients/runs.mdx similarity index 99% rename from frontend/docs/pages/sdks/python/feature-clients/runs.mdx rename to frontend/docs/pages/reference/python/feature-clients/runs.mdx index 3bb4a67cdd..f1b429cb40 100644 --- a/frontend/docs/pages/sdks/python/feature-clients/runs.mdx +++ b/frontend/docs/pages/reference/python/feature-clients/runs.mdx @@ -159,7 +159,7 @@ Returns: Trigger a new workflow run. -IMPORTANT: It's preferable to use `Workflow.run` (and similar) to trigger workflows if possible. This method is intended to be an escape hatch. For more details, see [the documentation](../../../sdks/python/runnables#workflow). +IMPORTANT: It's preferable to use `Workflow.run` (and similar) to trigger workflows if possible. This method is intended to be an escape hatch. For more details, see [the documentation](/reference/python/runnables#workflow). @@ -180,7 +180,7 @@ Returns: Trigger a new workflow run. -IMPORTANT: It's preferable to use `Workflow.run` (and similar) to trigger workflows if possible. This method is intended to be an escape hatch. For more details, see [the documentation](../../../sdks/python/runnables#workflow). +IMPORTANT: It's preferable to use `Workflow.run` (and similar) to trigger workflows if possible. This method is intended to be an escape hatch. For more details, see [the documentation](/reference/python/runnables#workflow). 
Parameters: diff --git a/frontend/docs/pages/sdks/python/feature-clients/scheduled.mdx b/frontend/docs/pages/reference/python/feature-clients/scheduled.mdx similarity index 99% rename from frontend/docs/pages/sdks/python/feature-clients/scheduled.mdx rename to frontend/docs/pages/reference/python/feature-clients/scheduled.mdx index 8146aa9ce5..f5b7812199 100644 --- a/frontend/docs/pages/sdks/python/feature-clients/scheduled.mdx +++ b/frontend/docs/pages/reference/python/feature-clients/scheduled.mdx @@ -23,7 +23,7 @@ Methods: Creates a new scheduled workflow run. -IMPORTANT: It's preferable to use `Workflow.run` (and similar) to trigger workflows if possible. This method is intended to be an escape hatch. For more details, see [the documentation](../../../sdks/python/runnables#workflow). +IMPORTANT: It's preferable to use `Workflow.run` (and similar) to trigger workflows if possible. This method is intended to be an escape hatch. For more details, see [the documentation](/reference/python/runnables#workflow). Parameters: @@ -99,7 +99,7 @@ Returns: Creates a new scheduled workflow run. -IMPORTANT: It's preferable to use `Workflow.run` (and similar) to trigger workflows if possible. This method is intended to be an escape hatch. For more details, see [the documentation](../../../sdks/python/runnables#workflow). +IMPORTANT: It's preferable to use `Workflow.run` (and similar) to trigger workflows if possible. This method is intended to be an escape hatch. For more details, see [the documentation](/reference/python/runnables#workflow). 
Parameters: diff --git a/frontend/docs/pages/sdks/python/feature-clients/workers.mdx b/frontend/docs/pages/reference/python/feature-clients/workers.mdx similarity index 100% rename from frontend/docs/pages/sdks/python/feature-clients/workers.mdx rename to frontend/docs/pages/reference/python/feature-clients/workers.mdx diff --git a/frontend/docs/pages/sdks/python/feature-clients/workflows.mdx b/frontend/docs/pages/reference/python/feature-clients/workflows.mdx similarity index 100% rename from frontend/docs/pages/sdks/python/feature-clients/workflows.mdx rename to frontend/docs/pages/reference/python/feature-clients/workflows.mdx diff --git a/frontend/docs/pages/home/lifespans.mdx b/frontend/docs/pages/reference/python/lifespans.mdx similarity index 100% rename from frontend/docs/pages/home/lifespans.mdx rename to frontend/docs/pages/reference/python/lifespans.mdx diff --git a/frontend/docs/pages/home/pydantic.mdx b/frontend/docs/pages/reference/python/pydantic.mdx similarity index 100% rename from frontend/docs/pages/home/pydantic.mdx rename to frontend/docs/pages/reference/python/pydantic.mdx diff --git a/frontend/docs/pages/sdks/python/runnables.mdx b/frontend/docs/pages/reference/python/runnables.mdx similarity index 100% rename from frontend/docs/pages/sdks/python/runnables.mdx rename to frontend/docs/pages/reference/python/runnables.mdx diff --git a/frontend/docs/pages/sdks/typescript/Context.mdx b/frontend/docs/pages/reference/typescript/Context.mdx similarity index 100% rename from frontend/docs/pages/sdks/typescript/Context.mdx rename to frontend/docs/pages/reference/typescript/Context.mdx diff --git a/frontend/docs/pages/sdks/typescript/Runnables.mdx b/frontend/docs/pages/reference/typescript/Runnables.mdx similarity index 100% rename from frontend/docs/pages/sdks/typescript/Runnables.mdx rename to frontend/docs/pages/reference/typescript/Runnables.mdx diff --git a/frontend/docs/pages/sdks/typescript/_meta.js 
b/frontend/docs/pages/reference/typescript/_meta.js similarity index 100% rename from frontend/docs/pages/sdks/typescript/_meta.js rename to frontend/docs/pages/reference/typescript/_meta.js diff --git a/frontend/docs/pages/sdks/typescript/client.mdx b/frontend/docs/pages/reference/typescript/client.mdx similarity index 100% rename from frontend/docs/pages/sdks/typescript/client.mdx rename to frontend/docs/pages/reference/typescript/client.mdx diff --git a/frontend/docs/pages/sdks/typescript/feature-clients/_meta.js b/frontend/docs/pages/reference/typescript/feature-clients/_meta.js similarity index 100% rename from frontend/docs/pages/sdks/typescript/feature-clients/_meta.js rename to frontend/docs/pages/reference/typescript/feature-clients/_meta.js diff --git a/frontend/docs/pages/sdks/typescript/feature-clients/crons.mdx b/frontend/docs/pages/reference/typescript/feature-clients/crons.mdx similarity index 100% rename from frontend/docs/pages/sdks/typescript/feature-clients/crons.mdx rename to frontend/docs/pages/reference/typescript/feature-clients/crons.mdx diff --git a/frontend/docs/pages/sdks/typescript/feature-clients/filters.mdx b/frontend/docs/pages/reference/typescript/feature-clients/filters.mdx similarity index 100% rename from frontend/docs/pages/sdks/typescript/feature-clients/filters.mdx rename to frontend/docs/pages/reference/typescript/feature-clients/filters.mdx diff --git a/frontend/docs/pages/sdks/typescript/feature-clients/metrics.mdx b/frontend/docs/pages/reference/typescript/feature-clients/metrics.mdx similarity index 100% rename from frontend/docs/pages/sdks/typescript/feature-clients/metrics.mdx rename to frontend/docs/pages/reference/typescript/feature-clients/metrics.mdx diff --git a/frontend/docs/pages/sdks/typescript/feature-clients/ratelimits.mdx b/frontend/docs/pages/reference/typescript/feature-clients/ratelimits.mdx similarity index 100% rename from frontend/docs/pages/sdks/typescript/feature-clients/ratelimits.mdx rename to 
frontend/docs/pages/reference/typescript/feature-clients/ratelimits.mdx diff --git a/frontend/docs/pages/sdks/typescript/feature-clients/runs.mdx b/frontend/docs/pages/reference/typescript/feature-clients/runs.mdx similarity index 100% rename from frontend/docs/pages/sdks/typescript/feature-clients/runs.mdx rename to frontend/docs/pages/reference/typescript/feature-clients/runs.mdx diff --git a/frontend/docs/pages/sdks/typescript/feature-clients/schedules.mdx b/frontend/docs/pages/reference/typescript/feature-clients/schedules.mdx similarity index 100% rename from frontend/docs/pages/sdks/typescript/feature-clients/schedules.mdx rename to frontend/docs/pages/reference/typescript/feature-clients/schedules.mdx diff --git a/frontend/docs/pages/sdks/typescript/feature-clients/webhooks.mdx b/frontend/docs/pages/reference/typescript/feature-clients/webhooks.mdx similarity index 100% rename from frontend/docs/pages/sdks/typescript/feature-clients/webhooks.mdx rename to frontend/docs/pages/reference/typescript/feature-clients/webhooks.mdx diff --git a/frontend/docs/pages/sdks/typescript/feature-clients/workers.mdx b/frontend/docs/pages/reference/typescript/feature-clients/workers.mdx similarity index 100% rename from frontend/docs/pages/sdks/typescript/feature-clients/workers.mdx rename to frontend/docs/pages/reference/typescript/feature-clients/workers.mdx diff --git a/frontend/docs/pages/sdks/typescript/feature-clients/workflows.mdx b/frontend/docs/pages/reference/typescript/feature-clients/workflows.mdx similarity index 100% rename from frontend/docs/pages/sdks/typescript/feature-clients/workflows.mdx rename to frontend/docs/pages/reference/typescript/feature-clients/workflows.mdx diff --git a/frontend/docs/pages/sdks/python/_meta.js b/frontend/docs/pages/sdks/python/_meta.js deleted file mode 100644 index 575cead22c..0000000000 --- a/frontend/docs/pages/sdks/python/_meta.js +++ /dev/null @@ -1,29 +0,0 @@ -export default { - client: { - title: "Client", - theme: { - 
toc: true, - }, - }, - - context: { - title: "Context", - theme: { - toc: true, - }, - }, - - "feature-clients": { - title: "Feature Clients", - theme: { - toc: true, - }, - }, - - runnables: { - title: "Runnables", - theme: { - toc: true, - }, - }, -}; diff --git a/frontend/docs/pages/self-hosting/improving-performance.mdx b/frontend/docs/pages/self-hosting/improving-performance.mdx index 34d37dd790..574c8cfaac 100644 --- a/frontend/docs/pages/self-hosting/improving-performance.mdx +++ b/frontend/docs/pages/self-hosting/improving-performance.mdx @@ -77,7 +77,7 @@ c.Event().BulkPush( #### Workflows - + diff --git a/frontend/docs/pages/self-hosting/prometheus-metrics.mdx b/frontend/docs/pages/self-hosting/prometheus-metrics.mdx index 80833f35f2..e205618731 100644 --- a/frontend/docs/pages/self-hosting/prometheus-metrics.mdx +++ b/frontend/docs/pages/self-hosting/prometheus-metrics.mdx @@ -33,7 +33,7 @@ Once enabled, you can setup any scraper that supports ingesting Prometheus metri Prometheus metrics. 
-To enable the [tenant API endpoint](/home/prometheus-metrics) you can set the following environment variables: +To enable the [tenant API endpoint](/v1/prometheus-metrics) you can set the following environment variables: - Required - **`SERVER_PROMETHEUS_SERVER_URL`** (`prometheus.prometheusServerURL`) diff --git a/frontend/docs/pages/v1/_meta.js b/frontend/docs/pages/v1/_meta.js new file mode 100644 index 0000000000..0cc8591351 --- /dev/null +++ b/frontend/docs/pages/v1/_meta.js @@ -0,0 +1,126 @@ +export default { + "--get-started": { + title: "Get Started", + type: "separator", + }, + index: "🪓 What is Hatchet?", + quickstart: "Quickstart", + "using-coding-agents": "Using Coding Agents", + "--core-concepts": { + title: "Core Concepts", + type: "separator", + }, + tasks: "Tasks", + workers: "Workers", + "running-your-task": "Running Tasks", + "durable-workflows-overview": "Intro to Durable Workflows", + "--triggers": { + title: "Triggers", + type: "separator", + }, + "scheduled-runs": "Scheduled Runs", + "cron-runs": "Cron Runs", + "bulk-run": "Bulk Runs", + webhooks: "Webhooks", + "external-events": { + title: "Event Triggers", + theme: { collapsed: true }, + }, + "inter-service-triggering": "Inter-Service Triggering", + "--reliability": { + title: "Reliability", + type: "separator", + }, + "retry-policies": "Retry Policies", + timeouts: "Timeouts", + cancellation: "Cancellation", + "bulk-retries-and-cancellations": "Bulk Retries & Cancellations", + "--flow-control": { + title: "Flow Control", + type: "separator", + }, + concurrency: "Concurrency", + "rate-limits": "Rate Limits", + priority: "Priority", + "--durable-workflows-section": { + title: "Durable Workflows", + type: "separator", + }, + patterns: { + title: "Patterns", + theme: { collapsed: true }, + }, + "child-spawning": "Child Spawning", + sleep: "Sleep & Delays", + events: "Wait For Events", + conditions: "Conditions & Branching", + "on-failure": "Error Handling", + "task-eviction": "Resource 
Management", + "--workers-section": { + title: "Workers", + type: "separator", + }, + docker: "Running with Docker", + "autoscaling-workers": "Autoscaling Workers", + "advanced-assignment": { + title: "Advanced Assignment", + theme: { collapsed: true }, + }, + "--observability": { + title: "Observability", + type: "separator", + }, + logging: "Logging", + opentelemetry: "OpenTelemetry", + "worker-healthchecks": "Worker Health Checks", + "prometheus-metrics": "Prometheus Metrics", + "additional-metadata": "Additional Metadata", + "--operations": { + title: "Operations", + type: "separator", + }, + middleware: "Middleware", + streaming: "Streaming", + environments: "Environments", + troubleshooting: { + title: "Troubleshooting", + theme: { collapsed: true }, + }, + "--evaluate": { + title: "Evaluating Hatchet?", + type: "separator", + }, + "architecture-and-guarantees": "Architecture & Guarantees", + "cloud-vs-oss": "Cloud vs OSS", + security: "Security", + "region-availability": "Region Availability", + uptime: "Uptime", + "developer-experience": "Developer Experience", + _setup: { + display: "hidden", + }, + compute: { + title: "Managed Compute", + type: "page", + display: "hidden", + }, + migrating: { + title: "V0 to V1 Upgrade Guide", + display: "hidden", + }, + "advanced-tasks": { + display: "hidden", + }, + runnables: { + display: "hidden", + }, + "error-handling": { + display: "hidden", + }, + "flow-control": { + display: "hidden", + }, + observability: { + display: "hidden", + }, +}; diff --git a/frontend/docs/pages/v1/_setup/_clone/go.mdx b/frontend/docs/pages/v1/_setup/_clone/go.mdx new file mode 100644 index 0000000000..f9340367d3 --- /dev/null +++ b/frontend/docs/pages/v1/_setup/_clone/go.mdx @@ -0,0 +1,44 @@ +import { Callout, Card, Cards, Steps, Tabs } from "nextra/components"; +import UniversalTabs from "@/components/UniversalTabs"; +import InstallCommand from "@/components/InstallCommand"; + +#### Clone a Quickstart Project + +```bash copy +git clone 
https://github.com/hatchet-dev/hatchet-go-quickstart.git +``` + +#### CD into the project + +```bash copy +cd hatchet-go-quickstart +``` + +#### Install dependencies + + +#### Start a Worker + +The fastest way to run a worker during development is with the Hatchet CLI: + +```bash copy +hatchet worker dev +``` + +You can also run the worker script directly. This requires a `HATCHET_CLIENT_TOKEN` environment variable. You can generate an API token from the Hatchet dashboard by navigating to the `Settings` tab and clicking `API Tokens`. Click `Generate API Token` to create a new token, and **do not share it publicly**. + +```bash copy +export HATCHET_CLIENT_TOKEN="" +``` + +If you are a self-hosted user without TLS enabled, also set: + +```bash copy +export HATCHET_CLIENT_TLS_STRATEGY=none +``` + +Then run your worker: + +```bash copy +go run main.go +``` diff --git a/frontend/docs/pages/v1/_setup/_clone/py.mdx b/frontend/docs/pages/v1/_setup/_clone/py.mdx new file mode 100644 index 0000000000..e6c6aff471 --- /dev/null +++ b/frontend/docs/pages/v1/_setup/_clone/py.mdx @@ -0,0 +1,44 @@ +import { Callout, Card, Cards, Steps, Tabs } from "nextra/components"; +import UniversalTabs from "@/components/UniversalTabs"; +import InstallCommand from "@/components/InstallCommand"; + +#### Clone a Quickstart Project + +```bash copy +git clone https://github.com/hatchet-dev/hatchet-python-quickstart.git +``` + +#### CD into the project + +```bash copy +cd hatchet-python-quickstart +``` + +#### Install dependencies + + +#### Start a Worker + +The fastest way to run a worker during development is with the Hatchet CLI: + +```bash copy +hatchet worker dev +``` + +You can also run the worker script directly. This requires a `HATCHET_CLIENT_TOKEN` environment variable. You can generate an API token from the Hatchet dashboard by navigating to the `Settings` tab and clicking `API Tokens`. Click `Generate API Token` to create a new token, and **do not share it publicly**. 
+ +```bash copy +export HATCHET_CLIENT_TOKEN="" +``` + +If you are a self-hosted user without TLS enabled, also set: + +```bash copy +export HATCHET_CLIENT_TLS_STRATEGY=none +``` + +Then run your worker: + +```bash copy +python worker.py +``` diff --git a/frontend/docs/pages/v1/_setup/_clone/ts.mdx b/frontend/docs/pages/v1/_setup/_clone/ts.mdx new file mode 100644 index 0000000000..dde9591c40 --- /dev/null +++ b/frontend/docs/pages/v1/_setup/_clone/ts.mdx @@ -0,0 +1,44 @@ +import { Callout, Card, Cards, Steps, Tabs } from "nextra/components"; +import UniversalTabs from "@/components/UniversalTabs"; +import InstallCommand from "@/components/InstallCommand"; + +#### Clone a Quickstart Project + +```bash copy +git clone https://github.com/hatchet-dev/hatchet-typescript-quickstart.git +``` + +#### CD into the project + +```bash copy +cd hatchet-typescript-quickstart +``` + +#### Install dependencies + + +#### Start a Worker + +The fastest way to run a worker during development is with the Hatchet CLI: + +```bash copy +hatchet worker dev +``` + +You can also run the worker script directly. This requires a `HATCHET_CLIENT_TOKEN` environment variable. You can generate an API token from the Hatchet dashboard by navigating to the `Settings` tab and clicking `API Tokens`. Click `Generate API Token` to create a new token, and **do not share it publicly**. 
+ +```bash copy +export HATCHET_CLIENT_TOKEN="" +``` + +If you are a self-hosted user without TLS enabled, also set: + +```bash copy +export HATCHET_CLIENT_TLS_STRATEGY=none +``` + +Then run your worker: + +```bash copy +npx ts-node src/worker.ts +``` diff --git a/frontend/docs/pages/_setup/_existing/go.mdx b/frontend/docs/pages/v1/_setup/_existing/go.mdx similarity index 58% rename from frontend/docs/pages/_setup/_existing/go.mdx rename to frontend/docs/pages/v1/_setup/_existing/go.mdx index 54ae63c6ab..21f38d09ad 100644 --- a/frontend/docs/pages/_setup/_existing/go.mdx +++ b/frontend/docs/pages/v1/_setup/_existing/go.mdx @@ -42,3 +42,29 @@ func main() { } } ``` + +#### Start a Worker + +The fastest way to run a worker during development is with the Hatchet CLI: + +```bash copy +hatchet worker dev +``` + +You can also run the worker script directly. This requires a `HATCHET_CLIENT_TOKEN` environment variable. You can generate an API token from the Hatchet dashboard by navigating to the `Settings` tab and clicking `API Tokens`. Click `Generate API Token` to create a new token, and **do not share it publicly**. + +```bash copy +export HATCHET_CLIENT_TOKEN="" +``` + +If you are a self-hosted user without TLS enabled, also set: + +```bash copy +export HATCHET_CLIENT_TLS_STRATEGY=none +``` + +Then run your worker: + +```bash copy +go run main.go +``` diff --git a/frontend/docs/pages/_setup/_existing/py.mdx b/frontend/docs/pages/v1/_setup/_existing/py.mdx similarity index 62% rename from frontend/docs/pages/_setup/_existing/py.mdx rename to frontend/docs/pages/v1/_setup/_existing/py.mdx index 42b2741d5c..b46a11a840 100644 --- a/frontend/docs/pages/_setup/_existing/py.mdx +++ b/frontend/docs/pages/v1/_setup/_existing/py.mdx @@ -42,3 +42,29 @@ You can now import the Hatchet Client in any file that needs it. 
```python copy from src.hatchet_client import hatchet ``` + +#### Start a Worker + +The fastest way to run a worker during development is with the Hatchet CLI: + +```bash copy +hatchet worker dev +``` + +You can also run the worker script directly. This requires a `HATCHET_CLIENT_TOKEN` environment variable. You can generate an API token from the Hatchet dashboard by navigating to the `Settings` tab and clicking `API Tokens`. Click `Generate API Token` to create a new token, and **do not share it publicly**. + +```bash copy +export HATCHET_CLIENT_TOKEN="" +``` + +If you are a self-hosted user without TLS enabled, also set: + +```bash copy +export HATCHET_CLIENT_TLS_STRATEGY=none +``` + +Then run your worker: + +```bash copy +python worker.py +``` diff --git a/frontend/docs/pages/_setup/_existing/ts.mdx b/frontend/docs/pages/v1/_setup/_existing/ts.mdx similarity index 59% rename from frontend/docs/pages/_setup/_existing/ts.mdx rename to frontend/docs/pages/v1/_setup/_existing/ts.mdx index 826bf8050a..dd0658af44 100644 --- a/frontend/docs/pages/_setup/_existing/ts.mdx +++ b/frontend/docs/pages/v1/_setup/_existing/ts.mdx @@ -36,3 +36,28 @@ touch hatchet-client.ts Add the following code to the file: +#### Start a Worker + +The fastest way to run a worker during development is with the Hatchet CLI: + +```bash copy +hatchet worker dev +``` + +You can also run the worker script directly. This requires a `HATCHET_CLIENT_TOKEN` environment variable. You can generate an API token from the Hatchet dashboard by navigating to the `Settings` tab and clicking `API Tokens`. Click `Generate API Token` to create a new token, and **do not share it publicly**. 
+ +```bash copy +export HATCHET_CLIENT_TOKEN="" +``` + +If you are a self-hosted user without TLS enabled, also set: + +```bash copy +export HATCHET_CLIENT_TLS_STRATEGY=none +``` + +Then run your worker: + +```bash copy +npx ts-node src/worker.ts +``` diff --git a/frontend/docs/pages/_setup/_new/go.mdx b/frontend/docs/pages/v1/_setup/_new/go.mdx similarity index 54% rename from frontend/docs/pages/_setup/_new/go.mdx rename to frontend/docs/pages/v1/_setup/_new/go.mdx index 03bbc3251b..1ecca8cd41 100644 --- a/frontend/docs/pages/_setup/_new/go.mdx +++ b/frontend/docs/pages/v1/_setup/_new/go.mdx @@ -40,3 +40,29 @@ func main() { } } ``` + +#### Start a Worker + +The fastest way to run a worker during development is with the Hatchet CLI: + +```bash copy +hatchet worker dev +``` + +You can also run the worker script directly. This requires a `HATCHET_CLIENT_TOKEN` environment variable. You can generate an API token from the Hatchet dashboard by navigating to the `Settings` tab and clicking `API Tokens`. Click `Generate API Token` to create a new token, and **do not share it publicly**. + +```bash copy +export HATCHET_CLIENT_TOKEN="" +``` + +If you are a self-hosted user without TLS enabled, also set: + +```bash copy +export HATCHET_CLIENT_TLS_STRATEGY=none +``` + +Then run your worker: + +```bash copy +go run main.go +``` diff --git a/frontend/docs/pages/_setup/_new/py.mdx b/frontend/docs/pages/v1/_setup/_new/py.mdx similarity index 60% rename from frontend/docs/pages/_setup/_new/py.mdx rename to frontend/docs/pages/v1/_setup/_new/py.mdx index 1f9b2693b8..05a93442f7 100644 --- a/frontend/docs/pages/_setup/_new/py.mdx +++ b/frontend/docs/pages/v1/_setup/_new/py.mdx @@ -41,3 +41,29 @@ You can now import the Hatchet Client in any file that needs it. 
```python copy from src.hatchet_client import hatchet ``` + +#### Start a Worker + +The fastest way to run a worker during development is with the Hatchet CLI: + +```bash copy +hatchet worker dev +``` + +You can also run the worker script directly. This requires a `HATCHET_CLIENT_TOKEN` environment variable. You can generate an API token from the Hatchet dashboard by navigating to the `Settings` tab and clicking `API Tokens`. Click `Generate API Token` to create a new token, and **do not share it publicly**. + +```bash copy +export HATCHET_CLIENT_TOKEN="" +``` + +If you are a self-hosted user without TLS enabled, also set: + +```bash copy +export HATCHET_CLIENT_TLS_STRATEGY=none +``` + +Then run your worker: + +```bash copy +python worker.py +``` diff --git a/frontend/docs/pages/_setup/_new/ts.mdx b/frontend/docs/pages/v1/_setup/_new/ts.mdx similarity index 58% rename from frontend/docs/pages/_setup/_new/ts.mdx rename to frontend/docs/pages/v1/_setup/_new/ts.mdx index 34e8c26ba6..3994a44dca 100644 --- a/frontend/docs/pages/_setup/_new/ts.mdx +++ b/frontend/docs/pages/v1/_setup/_new/ts.mdx @@ -37,3 +37,29 @@ Add the following code to the file: You can now import the Hatchet Client in any file that needs it. + +#### Start a Worker + +The fastest way to run a worker during development is with the Hatchet CLI: + +```bash copy +hatchet worker dev +``` + +You can also run the worker script directly. This requires a `HATCHET_CLIENT_TOKEN` environment variable. You can generate an API token from the Hatchet dashboard by navigating to the `Settings` tab and clicking `API Tokens`. Click `Generate API Token` to create a new token, and **do not share it publicly**. 
+ +```bash copy +export HATCHET_CLIENT_TOKEN="" +``` + +If you are a self-hosted user without TLS enabled, also set: + +```bash copy +export HATCHET_CLIENT_TLS_STRATEGY=none +``` + +Then run your worker: + +```bash copy +npx ts-node src/worker.ts +``` diff --git a/frontend/docs/pages/_setup/clone.mdx b/frontend/docs/pages/v1/_setup/clone.mdx similarity index 100% rename from frontend/docs/pages/_setup/clone.mdx rename to frontend/docs/pages/v1/_setup/clone.mdx diff --git a/frontend/docs/pages/_setup/existing.mdx b/frontend/docs/pages/v1/_setup/existing.mdx similarity index 100% rename from frontend/docs/pages/_setup/existing.mdx rename to frontend/docs/pages/v1/_setup/existing.mdx diff --git a/frontend/docs/pages/_setup/new.mdx b/frontend/docs/pages/v1/_setup/new.mdx similarity index 100% rename from frontend/docs/pages/_setup/new.mdx rename to frontend/docs/pages/v1/_setup/new.mdx diff --git a/frontend/docs/pages/_setup/tabs.mdx b/frontend/docs/pages/v1/_setup/tabs.mdx similarity index 100% rename from frontend/docs/pages/_setup/tabs.mdx rename to frontend/docs/pages/v1/_setup/tabs.mdx diff --git a/frontend/docs/pages/home/additional-metadata.mdx b/frontend/docs/pages/v1/additional-metadata.mdx similarity index 95% rename from frontend/docs/pages/home/additional-metadata.mdx rename to frontend/docs/pages/v1/additional-metadata.mdx index ab11da06b1..66354188fc 100644 --- a/frontend/docs/pages/home/additional-metadata.mdx +++ b/frontend/docs/pages/v1/additional-metadata.mdx @@ -1,5 +1,5 @@ import { Callout, Card, Cards, Steps, Tabs } from "nextra/components"; -import UniversalTabs from "../../components/UniversalTabs"; +import UniversalTabs from "@/components/UniversalTabs"; import { Snippet } from "@/components/code"; import { snippets } from "@/lib/generated/snippets"; @@ -39,7 +39,7 @@ You can attach additional metadata when pushing events or triggering task runs u - + + diff --git a/frontend/docs/pages/home/worker-affinity.mdx 
b/frontend/docs/pages/v1/advanced-assignment/worker-affinity.mdx similarity index 97% rename from frontend/docs/pages/home/worker-affinity.mdx rename to frontend/docs/pages/v1/advanced-assignment/worker-affinity.mdx index 1c1ff31324..00a48cc01b 100644 --- a/frontend/docs/pages/home/worker-affinity.mdx +++ b/frontend/docs/pages/v1/advanced-assignment/worker-affinity.mdx @@ -1,7 +1,7 @@ import { snippets } from "@/lib/generated/snippets"; import { Snippet } from "@/components/code"; import { Tabs, Callout } from "nextra/components"; -import UniversalTabs from "../../components/UniversalTabs"; +import UniversalTabs from "@/components/UniversalTabs"; # Worker Affinity Assignment (Beta) @@ -60,7 +60,7 @@ You can specify desired worker label state for specific tasks in a workflow by s - `required` (default: `true`): Whether the label is required for the task to run. If `true`, the task will remain in a pending state until a worker with the desired label state becomes available. If `false`, the worker will be prioritized based on the sum of the highest matching weights. - `weight` (optional, default: `100`): The weight of the label. Higher weights are prioritized over lower weights when selecting a worker for the task. If multiple workers have the same highest weight, the worker with the highest sum of weights will be selected. Ignored if `required` is `true`. - + @@ -120,7 +120,7 @@ You can specify desired worker label state for specific tasks in a workflow by s Labels can also be set dynamically on workers using the `upsertLabels` method. This can be useful when worker state changes over time, such as when a new model is loaded into memory or when a worker's resource availability changes. 
- + diff --git a/frontend/docs/pages/v1/advanced-tasks/_meta.js b/frontend/docs/pages/v1/advanced-tasks/_meta.js new file mode 100644 index 0000000000..44e5c703a0 --- /dev/null +++ b/frontend/docs/pages/v1/advanced-tasks/_meta.js @@ -0,0 +1,6 @@ +export default { + index: { display: "hidden" }, + cancellation: "Cancellation", + streaming: "Streaming", + "additional-metadata": "Additional Metadata", +}; diff --git a/frontend/docs/pages/v1/advanced-tasks/additional-metadata.mdx b/frontend/docs/pages/v1/advanced-tasks/additional-metadata.mdx new file mode 100644 index 0000000000..66354188fc --- /dev/null +++ b/frontend/docs/pages/v1/advanced-tasks/additional-metadata.mdx @@ -0,0 +1,84 @@ +import { Callout, Card, Cards, Steps, Tabs } from "nextra/components"; +import UniversalTabs from "@/components/UniversalTabs"; +import { Snippet } from "@/components/code"; +import { snippets } from "@/lib/generated/snippets"; + +# Additional Metadata + +Hatchet allows you to attach arbitrary key-value string pairs to events and task runs, which can be used for filtering, searching, or any other lookup purposes. This additional metadata is not part of the event payload or task input data but provides supplementary information for better organization and discoverability. + + + Additional metadata can be added to `Runs`, `Scheduled Runs`, `Cron Runs`, and + `Events`. The data is propagated from parents to children or from events to + runs. + + +You can attach additional metadata when pushing events or triggering task runs using the Hatchet client libraries: + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +## Filtering in the Dashboard + +Once you've attached additional metadata to events or task runs, this data will be available in the Event and Task Run list views in the Hatchet dashboard. You can use the filter input field to search for events or task runs based on the additional metadata key-value pairs you've attached. 
+
+For example, you can filter events by the `source` metadata key to quickly find events originating from a specific source or environment.
+
+![Blocks](/addl-meta.gif)
+
+## Use Cases
+
+Some common use cases for additional metadata include:
+
+- Tagging events or task runs with environment information (e.g., `production`, `staging`, `development`)
+- Specifying the source or origin of events (e.g., `api`, `webhook`, `manual`)
+- Categorizing events or task runs based on business-specific criteria (e.g., `priority`, `region`, `product`)
+
+By leveraging additional metadata, you can enhance the organization, searchability, and discoverability of your events and task runs within Hatchet.
diff --git a/frontend/docs/pages/home/cancellation.mdx b/frontend/docs/pages/v1/advanced-tasks/cancellation.mdx
similarity index 100%
rename from frontend/docs/pages/home/cancellation.mdx
rename to frontend/docs/pages/v1/advanced-tasks/cancellation.mdx
diff --git a/frontend/docs/pages/v1/advanced-tasks/index.mdx b/frontend/docs/pages/v1/advanced-tasks/index.mdx
new file mode 100644
index 0000000000..661d465842
--- /dev/null
+++ b/frontend/docs/pages/v1/advanced-tasks/index.mdx
@@ -0,0 +1,11 @@
+---
+title: Advanced Task Features
+description: Cancellation, streaming, and other advanced task capabilities.
+---
+
+# Advanced Task Features
+
+Advanced capabilities for task execution.
+ +- [Cancellation](/v1/advanced-tasks/cancellation) — Cancel running tasks +- [Streaming](/v1/advanced-tasks/streaming) — Stream data from tasks in real time diff --git a/frontend/docs/pages/home/streaming.mdx b/frontend/docs/pages/v1/advanced-tasks/streaming.mdx similarity index 93% rename from frontend/docs/pages/home/streaming.mdx rename to frontend/docs/pages/v1/advanced-tasks/streaming.mdx index ca5715e17c..dd286869d3 100644 --- a/frontend/docs/pages/home/streaming.mdx +++ b/frontend/docs/pages/v1/advanced-tasks/streaming.mdx @@ -36,9 +36,9 @@ This task will stream small chunks of content through Hatchet, which can then be ## Consuming Streams -You can easily consume stream events by using the stream method on the workflow run ref that the various [fire-and-forget](./run-no-wait.mdx) methods return. +You can easily consume stream events by using the stream method on the workflow run ref that the various [fire-and-forget](/v1/running-your-task#fire-and-forget) methods return. - + @@ -80,7 +80,7 @@ It's common to want to stream events out of a Hatchet task and back to the front In both cases, we recommend using your application's backend as a proxy for the stream, where you would subscribe to the stream of events from Hatchet, and then stream events through to the frontend as they're received by the backend. - + For example, with FastAPI, you'd do the following: diff --git a/frontend/docs/pages/v1/architecture-and-guarantees.mdx b/frontend/docs/pages/v1/architecture-and-guarantees.mdx new file mode 100644 index 0000000000..6a0be0ed8f --- /dev/null +++ b/frontend/docs/pages/v1/architecture-and-guarantees.mdx @@ -0,0 +1,146 @@ +import { Callout } from "nextra/components"; + +# Architecture & Guarantees + +This page explains how Hatchet is put together, what the main components do, and what reliability guarantees you should design your workers around. 
+ +## Architecture overview + +Hatchet has three main moving pieces: + +- **API server**: the HTTP surface area for triggering workflows, querying state, and powering the UI +- **Engine**: schedules and dispatches work, enforces dependencies/policies, and records state transitions durably +- **Workers**: your processes that run the actual task code + +State is stored durably (PostgreSQL is the source of truth). In many deployments that’s enough—no separate broker required—while self-hosted high-throughput setups can add additional components (for example, RabbitMQ) based on your needs. + +Hatchet Cloud and self-hosted Hatchet share the same architecture; the difference is who runs and operates the Hatchet services. + +```mermaid +graph LR + subgraph "External (Optional)" + EXT[Webhooks
Events] + end + + subgraph "Your Infrastructure" + APP[Your API, App, Service, etc.] + W[Workers] + end + + subgraph "Hatchet" + API[API Server] + ENG[Engine] + DB[(Database)] + end + + EXT --> API + APP <--> API + API --> ENG + ENG <--> DB + API <--> DB + ENG <-.->|gRPC| W + + classDef userInfra fill:#e3f2fd,stroke:#1976d2,stroke-width:2px,color:#0d47a1 + classDef hatchet fill:#f1f8e9,stroke:#388e3c,stroke-width:2px,color:#1b5e20 + classDef external fill:#fff8e1,stroke:#f57c00,stroke-width:2px,color:#e65100 + + class APP,W userInfra + class API,ENG,DB hatchet + class EXT external +``` + +## Core components + +### API server + +The API server is the front door to Hatchet. It’s what your application and the Hatchet UI talk to in order to: + +- trigger workflows with input data +- query workflow/task state (and, where supported, subscribe to updates) +- manage resources like schedules and settings +- ingest webhooks/events (optional) + +### Engine + +The engine is responsible for turning “a workflow should run” into “these tasks are ready and should be executed.” In practice, it: + +- evaluates workflow dependencies +- enforces policies like concurrency limits, rate limits, and priorities +- schedules ready tasks and dispatches them to connected workers +- records state transitions durably and applies retry/timeout behavior +- runs scheduled/cron workflows + +Workers connect to the engine over bidirectional gRPC, which allows low-latency dispatch and frequent status updates. + +### Workers + +Workers are your processes. They connect to the engine, receive tasks, run your code, and report progress/results back to Hatchet. + +Workers are intentionally flexible: you can run them locally, in containers, or on VMs, and you can scale workers independently from the Hatchet services. You can also run different “types” of workers (and even different languages) depending on what your system needs. 
+ +### Storage (and optional messaging) + +PostgreSQL is the durable store for workflow definitions and execution state (queued/running/completed, inputs/outputs, retries, etc.). In self-hosted deployments, you can start with PostgreSQL-only and add components like RabbitMQ if you need higher throughput. + +## Guarantees & tradeoffs + +Hatchet aims to sit in the middle: more structure than a simple queue, but simpler to operate than a full distributed workflow system. + +### Good fit for + +- **Workflow orchestration** with dependencies, retries, and timeouts +- **Durable background jobs** where “don’t lose work” matters +- **Moderate to high throughput** systems (and a path to higher scale with tuning/sharding). If you’re pushing the limits, [contact us](https://hatchet.run/contact). +- **Multi-language / polyglot workers** +- **Teams already on PostgreSQL** who want operational simplicity +- **Cloud or air-gapped environments** ([Hatchet Cloud](https://cloud.onhatchet.run) or [self-hosting](/self-hosting)) + +### Not a good fit for + +- **Extremely high throughput** without sharding/custom tuning (for example, sustaining 10,000+ tasks/sec) +- **Sub-millisecond dispatch latency** requirements +- **In-memory-only queuing** where durability is unnecessary +- **Serverless-only runtimes** (e.g. AWS Lambda / Cloud Functions) as your primary worker model + +## Core reliability guarantees + +### At-least-once task execution + +Hatchet is **at least once**: tasks are not silently dropped, and failures retry according to your configuration. This also means **a task can run more than once**, so your task code should be **idempotent** (or otherwise safe to retry). + +### Durable state transitions + +Workflow state is persisted in PostgreSQL, and state transitions are performed transactionally. This helps keep dependency resolution consistent and makes the system resilient to restarts and transient failures. 
+ +### Execution policies are explicit + +By default, task assignment is FIFO, and you can change execution behavior using: + +- [Concurrency policies](/v1/concurrency) +- [Rate limits](/v1/rate-limits) +- [Priorities](/v1/priority) + +### Stateless services; resilient workers + +The engine and API server are designed to restart without losing state, which also enables horizontal scaling by running multiple instances. Workers reconnect after network interruptions and can run close to your services (or close to Hatchet) depending on your latency goals. + +## Performance expectations + +Real-world performance depends heavily on topology (worker ↔ engine network latency), database sizing, and workload shape. + +- **Dispatch latency**: often sub-50ms with PostgreSQL-backed storage; in optimized, “hot worker” setups it can be closer to ~25ms P95. +- **Throughput**: varies by setup. PostgreSQL-only deployments often handle hundreds of tasks/sec per engine instance; higher throughput typically requires additional tuning and/or components like RabbitMQ. With tuning and sharding, Hatchet can scale into the high tens of thousands of tasks/sec—[contact us](https://hatchet.run/contact) if you want to design for that. +- **Common bottlenecks**: DB connection limits, large payloads (e.g. > 1MB), complex dependency graphs, and cross-region latency. + + + +**Not seeing expected performance?** + +If you're not seeing the performance you expect, please [reach out to us](https://hatchet.run/office-hours) or [join our community](https://hatchet.run/discord) to explore tuning options. 
+ + + +## Next Steps + +- **[Quick Start](/v1/quickstart)**: set up your first Hatchet worker +- **[Self-Hosting](/self-hosting)**: deploy Hatchet on your own infrastructure diff --git a/frontend/docs/pages/home/autoscaling-workers.mdx b/frontend/docs/pages/v1/autoscaling-workers.mdx similarity index 100% rename from frontend/docs/pages/home/autoscaling-workers.mdx rename to frontend/docs/pages/v1/autoscaling-workers.mdx diff --git a/frontend/docs/pages/home/bulk-retries-and-cancellations.mdx b/frontend/docs/pages/v1/bulk-retries-and-cancellations.mdx similarity index 99% rename from frontend/docs/pages/home/bulk-retries-and-cancellations.mdx rename to frontend/docs/pages/v1/bulk-retries-and-cancellations.mdx index 418b114094..6d639047d6 100644 --- a/frontend/docs/pages/home/bulk-retries-and-cancellations.mdx +++ b/frontend/docs/pages/v1/bulk-retries-and-cancellations.mdx @@ -81,7 +81,7 @@ The first way to bulk cancel or replay runs is by providing a list of run ids. T The second way to bulk cancel or replay runs is by providing a list of filters. This is the most powerful way to cancel or replay runs in bulk, as it allows you to cancel or replay all runs matching a set of arbitrary filters without needing to provide IDs for the runs in advance. 
- + {/* TODO V1 DOCS - Add TS */} diff --git a/frontend/docs/pages/home/bulk-run.mdx b/frontend/docs/pages/v1/bulk-run.mdx similarity index 100% rename from frontend/docs/pages/home/bulk-run.mdx rename to frontend/docs/pages/v1/bulk-run.mdx diff --git a/frontend/docs/pages/v1/cancellation.mdx b/frontend/docs/pages/v1/cancellation.mdx new file mode 100644 index 0000000000..7fce04cd7e --- /dev/null +++ b/frontend/docs/pages/v1/cancellation.mdx @@ -0,0 +1,68 @@ +import { Callout, Card, Cards, Steps, Tabs } from "nextra/components"; +import UniversalTabs from "@/components/UniversalTabs"; +import { Snippet } from "@/components/code"; +import { snippets } from "@/lib/generated/snippets"; + +# Cancellation in Hatchet Tasks + +Hatchet provides a mechanism for canceling task executions gracefully, allowing you to signal to running tasks that they should stop running. Cancellation can be triggered on graceful termination of a worker or automatically through concurrency control strategies like [`CANCEL_IN_PROGRESS`](./concurrency.mdx#cancel_in_progress), which cancels currently running task instances to free up slots for new instances when the concurrency limit is reached. + +When a task is canceled, Hatchet sends a cancellation signal to the task. The task can then check for the cancellation signal and take appropriate action, such as cleaning up resources, aborting network requests, or gracefully terminating their execution. + +## Cancellation Mechanisms + + + + + + + + + + + + + + + + + + + + + +## Cancellation Best Practices + +When working with cancellation in Hatchet tasks, consider the following best practices: + +1. **Graceful Termination**: When a task receives a cancellation signal, aim to terminate its execution gracefully. Clean up any resources, abort pending operations, and perform any necessary cleanup tasks before returning from the task function. + +2. **Cancellation Checks**: Regularly check for cancellation signals within long-running tasks or loops. 
This allows the task to respond to cancellation in a timely manner and avoid unnecessary processing.
+
+3. **Asynchronous Operations**: If a task performs asynchronous operations, such as network requests or file I/O, consider passing the cancellation signal to those operations. Many libraries and APIs support cancellation through the `AbortSignal` interface.
+
+4. **Error Handling**: Handle cancellation errors appropriately. Distinguish between cancellation errors and other types of errors to provide meaningful error messages and take appropriate actions.
+
+5. **Cancellation Propagation**: If a task invokes other functions or libraries, consider propagating the cancellation signal to those dependencies. This ensures that cancellation is handled consistently throughout the task.
+
+## Additional Features
+
+In addition to the methods of cancellation listed here, Hatchet also supports [bulk cancellation](/v1/bulk-retries-and-cancellations), which allows you to cancel many tasks in bulk using either their IDs or a set of filters, which is often the easiest way to cancel many things at once.
+
+## Conclusion
+
+Cancellation is a powerful feature in Hatchet that allows you to gracefully stop task executions when needed. Remember to follow best practices when implementing cancellation in your tasks, such as graceful termination, regular cancellation checks, handling asynchronous operations, proper error handling, and cancellation propagation.
+
+By incorporating cancellation into your Hatchet tasks and workflows, you can build more resilient and responsive systems that can adapt to changing circumstances and user needs.
diff --git a/frontend/docs/pages/v1/child-spawning.mdx b/frontend/docs/pages/v1/child-spawning.mdx new file mode 100644 index 0000000000..66511eed74 --- /dev/null +++ b/frontend/docs/pages/v1/child-spawning.mdx @@ -0,0 +1,274 @@ +import { Callout, Cards, Tabs } from "nextra/components"; +import UniversalTabs from "@/components/UniversalTabs"; +import { Snippet } from "@/components/code"; +import { snippets } from "@/lib/generated/snippets"; +import CycleDiagram from "@/components/CycleDiagramWrapper"; +import BatchProcessingDiagram from "@/components/BatchProcessingDiagramWrapper"; +import FanoutDiagram from "@/components/FanoutDiagram"; + +# Child Spawning + +A task can spawn child tasks at runtime, including other durable tasks or entire DAG workflows. Children run independently on any available worker, and the parent can wait for their results. + +Both durable tasks and DAG tasks support child spawning with the same core API. The key difference is that durable tasks free the parent's worker slot while waiting (via [eviction](/v1/durable-workflows/task-eviction)), while DAG tasks hold their slot for the duration of execution. + + + + +## Spawning from Durable Tasks + +A durable task can spawn child tasks at runtime. This is one of the core reasons to choose durable tasks over DAGs: the shape of work is decided as the task runs, not declared upfront. + + + Waiting for child results puts the parent task into an [evictable + state](/v1/durable-workflows/task-eviction), the worker slot is freed and the + parent is re-queued when results are available. + + +Because the parent is evicted while children execute: + +- **No slot waste** — the parent doesn't hold a worker slot while N children run across your fleet. +- **No deadlocks** — because the parent is evicted, it can't starve its own children for slots. +- **Dynamic N** — you decide how many children to spawn based on runtime data (input size, API responses, agent reasoning). 
+ +### Spawning child tasks + +Use the context object to spawn a child task from within a durable task. The child runs independently on any available worker. + + + + + + + + + + + + + + + + + + + + + + +### Parallel fan-out + +Spawn many children at once and wait for all results. The parent is evicted during the wait, so it consumes no resources while children run. + + + + + + + + + + + + + + + + + + + + + + +### What children can be + +A durable task can spawn any runnable: + +| Child type | Example | +| ---------------- | --------------------------------------------------------------------------------- | +| **Regular task** | Spawn a stateless task for a quick computation or API call. | +| **Durable task** | Spawn another durable task that has its own checkpoints, sleeps, and event waits. | +| **DAG workflow** | Spawn an entire multi-task workflow and wait for its final output. | + +### Error handling + + + + + + + + + + + + + + + + + + + + + + + + + +## Spawning from DAG Tasks + +DAG tasks can also spawn child tasks procedurally during execution. This lets you combine a fixed pipeline structure with dynamic child work inside individual tasks. + +### Creating parent and child tasks + +To implement child task spawning, you first need to create both parent and child task definitions. + + + + +First, we'll declare a couple of tasks for the parent and child: + + + +We also created a step on the parent task that spawns the child tasks. Now, we'll add a couple of steps to the child task: + + + +And that's it! The fanout parent will run and spawn the child, and then will collect the results from its steps. 
+ + + + + + + + + + + + + + + + + + +### Running child tasks + +To spawn and run a child task from a parent task, use the appropriate method for your language: + + + + + + + + + + + + + + + + + + + + + + +### Parallel child task execution + +Spawn multiple child tasks in parallel: + + + + + + + + + + + + + + + + + + + + + + +### Error handling + + + + + + + + + + + + + + + + + + + + + + + + + +## Common Patterns + +### Dynamic fan-out / fan-in + +Process a list of items whose length is only known at runtime. Spawn one child per item, collect all results, then continue. [Document processing](/guides/document-processing) and [batch processing](/guides/batch-processing) are canonical examples: when a batch of files arrives, a parent fans out to one child per document; each child parses, extracts, and validates its document in parallel across your worker fleet. + + + +[Concurrency](/v1/concurrency) controls how many children run simultaneously. Hatchet distributes child tasks across available workers, so adding workers increases throughput without code changes. For rate-limited external services (OCR, LLM APIs), combine with [Rate Limits](/v1/rate-limits) to throttle child execution across all workers. + +### Agent loops + +An **agent loop** is implemented by having a durable task spawn a new child run of itself with updated input until a termination condition is met. Each iteration is a separate child task, giving full observability in the dashboard. AI agents use this pattern when they reason about what to do, spawn a subtask (or a sub-workflow), inspect the result, and decide whether to continue, branch, or stop. See [AI Agents](/guides/ai-agents/reasoning-loop) for a detailed guide. + + + +### Recursive workflows + +A durable task spawns child durable tasks, each of which may spawn their own children. This creates a tree of work that's entirely driven by runtime logic, useful for crawlers, recursive search, and tree-structured computations. + +## Use cases + +1. 
**Dynamic fan-out processing**: When the number of parallel tasks is determined at runtime. See [Batch Processing](/guides/batch-processing) and [Document Processing](/guides/document-processing).
+2. **Reusable workflow components**: Create modular workflows that can be reused across different parent workflows.
+3. **Resource-intensive operations**: Spread computation across multiple workers.
+4. **Agent-based systems**: Allow AI agents to spawn new workflows based on their reasoning. See [AI Agents](/guides/ai-agents/reasoning-loop).
+5. **Long-running operations**: Break down long operations into smaller, trackable units of work.
diff --git a/frontend/docs/pages/v1/cloud-vs-oss.mdx b/frontend/docs/pages/v1/cloud-vs-oss.mdx
new file mode 100644
index 0000000000..1620468fab
--- /dev/null
+++ b/frontend/docs/pages/v1/cloud-vs-oss.mdx
@@ -0,0 +1,67 @@
+# Cloud vs OSS
+
+Hatchet is available as **Hatchet Cloud** (managed) and as **open source** (self-hosted). The programming model is the same: you write tasks/workflows in code and run workers that connect to Hatchet.
+
+This page helps you decide which deployment model fits your team.
+ +## Quick decision guide + +Choose **Hatchet Cloud** if you want: + +- the Hatchet control plane operated for you (upgrades, scaling, backups) +- the fastest path to production +- a status page and managed incident response + +Choose **self-hosted (OSS)** if you need: + +- full control over infrastructure and networking +- strict data residency or air-gapped environments +- a deployment you can customize and operate with your own tooling + +## What’s the same in both + +- **SDK + worker model**: your workers run your code and connect to Hatchet +- **Durability + retries**: tasks are durably tracked and retry according to configuration +- **Observability surfaces**: you can inspect runs, workers, and workflow history +- **Core semantics**: the same workflows/tasks/concurrency patterns apply + +## What changes (operational responsibilities) + +### Hatchet Cloud (managed) + +Hatchet runs and operates the Hatchet services. You bring: + +- your worker processes +- your application code that triggers workflows +- your operational policies (timeouts, retries, concurrency, rate limits) + +For security and compliance documentation, see the **[Hatchet Trust Center](https://trust.hatchet.run/)**. For current incidents and historical uptime, see **[status.hatchet.run](https://status.hatchet.run/)**. + +### Self-hosted (OSS) + +You run and operate the Hatchet services and their dependencies. Typical responsibilities include: + +- provisioning and scaling the Hatchet services +- managing PostgreSQL (and any optional components you deploy) +- backups, upgrades, and monitoring +- network security and access control for the API/DB + +If you’re planning production usage, start with: + +- [Self Hosting](/self-hosting) +- [High Availability](/self-hosting/high-availability) +- [Security](/v1/security) + +## Migrating between Cloud and self-hosted + +You can move between deployment models without rewriting worker code. 
In practice, migration usually means: + +- pointing workers and clients at a new endpoint +- swapping credentials/tokens +- validating environment-specific settings (TLS, networking, retention, etc.) + +## Next steps + +- **[Quickstart](/v1/quickstart)**: run a worker and trigger your first workflow +- **[Architecture & Guarantees](/v1/architecture-and-guarantees)**: understand reliability semantics and tradeoffs +- **[Self Hosting](/self-hosting)**: deploy Hatchet on your own infrastructure diff --git a/frontend/docs/pages/home/compute/_meta.js b/frontend/docs/pages/v1/compute/_meta.js similarity index 100% rename from frontend/docs/pages/home/compute/_meta.js rename to frontend/docs/pages/v1/compute/_meta.js diff --git a/frontend/docs/pages/home/compute/auto-scaling.mdx b/frontend/docs/pages/v1/compute/auto-scaling.mdx similarity index 100% rename from frontend/docs/pages/home/compute/auto-scaling.mdx rename to frontend/docs/pages/v1/compute/auto-scaling.mdx diff --git a/frontend/docs/pages/home/compute/cpu.mdx b/frontend/docs/pages/v1/compute/cpu.mdx similarity index 100% rename from frontend/docs/pages/home/compute/cpu.mdx rename to frontend/docs/pages/v1/compute/cpu.mdx diff --git a/frontend/docs/pages/home/compute/environment-variables.mdx b/frontend/docs/pages/v1/compute/environment-variables.mdx similarity index 100% rename from frontend/docs/pages/home/compute/environment-variables.mdx rename to frontend/docs/pages/v1/compute/environment-variables.mdx diff --git a/frontend/docs/pages/home/compute/getting-started.mdx b/frontend/docs/pages/v1/compute/getting-started.mdx similarity index 100% rename from frontend/docs/pages/home/compute/getting-started.mdx rename to frontend/docs/pages/v1/compute/getting-started.mdx diff --git a/frontend/docs/pages/home/compute/git-ops.mdx b/frontend/docs/pages/v1/compute/git-ops.mdx similarity index 100% rename from frontend/docs/pages/home/compute/git-ops.mdx rename to frontend/docs/pages/v1/compute/git-ops.mdx diff --git 
a/frontend/docs/pages/home/compute/gpu.mdx b/frontend/docs/pages/v1/compute/gpu.mdx similarity index 100% rename from frontend/docs/pages/home/compute/gpu.mdx rename to frontend/docs/pages/v1/compute/gpu.mdx diff --git a/frontend/docs/pages/home/compute/index.mdx b/frontend/docs/pages/v1/compute/index.mdx similarity index 100% rename from frontend/docs/pages/home/compute/index.mdx rename to frontend/docs/pages/v1/compute/index.mdx diff --git a/frontend/docs/pages/home/concurrency.mdx b/frontend/docs/pages/v1/concurrency.mdx similarity index 99% rename from frontend/docs/pages/home/concurrency.mdx rename to frontend/docs/pages/v1/concurrency.mdx index d0a71206f9..7383363554 100644 --- a/frontend/docs/pages/home/concurrency.mdx +++ b/frontend/docs/pages/v1/concurrency.mdx @@ -130,7 +130,7 @@ The `CANCEL_NEWEST` strategy is particularly useful in scenarios where: You can also combine multiple concurrency strategies to create a more complex concurrency control system. For example, you can use one group key to represent a specific team, and another group to represent a specific resource in that team, giving you more control over the rate at which tasks are executed. - + + + + + +## Procedural Branching + +Durable tasks use standard language control flow (`if`/`else`, `match`, loops) to branch at runtime. Because the task is a single long-running function, you can make decisions based on any data available during execution: inputs, intermediate results, API responses, or child task outputs. + +```python +@workflow.durable_task() +async def process(input: ProcessInput, ctx: DurableContext): + result = await ctx.run_child(analyze_task, input) + + if result["score"] > 0.8: + await ctx.run_child(fast_path_task, result) + else: + await ctx.run_child(slow_path_task, result) + await ctx.run_child(review_task, result) +``` + +This is one of the key advantages of durable tasks: branching logic is expressed directly in code, making it easy to handle complex, dynamic flows. 
Each branch can spawn different children, sleep for different durations, or wait for different events. + + + Branching logic must be **deterministic** between checkpoints. If the task is + evicted and replayed, the same branches must execute in the same order. Base + decisions on checkpoint outputs (child results, event payloads) rather than + wall-clock time or external state that may change between replays. See [Best + Practices](/v1/patterns/mixing-patterns#determinism-in-durable-tasks) for + details. + + +## Or Groups + +Durable tasks can combine multiple wait conditions using or groups. An or group evaluates to `True` if **at least one** of its conditions is satisfied, letting you express "proceed on timeout or event, whichever comes first." + + + + + + +`or_()` wraps a `SleepCondition` and a `UserEventCondition` into a single or group. The task will resume as soon as either the sleep expires or the event arrives. + + + + + + + + + + + + + + + + + + + + + + +## Parent Conditions + +Parent conditions let a DAG task decide whether to run based on the output of a parent task. This enables branching logic within a DAG: different paths can execute depending on runtime data, while the overall graph structure remains fixed and visible in the dashboard. + +Parent conditions can be used with two operators: + +- **`skip_if`** — skip the task if the parent output matches the condition. +- **`cancel_if`** — cancel the task (and its downstream dependents) if the parent output matches the condition. + + + A task cancelled by `cancel_if` behaves like any other cancellation in Hatchet + — downstream tasks will be cancelled as well. + + +### Branching example + +A common pattern is to create two sibling tasks with complementary parent conditions. For example, one task runs when a value is greater than 50 and the other runs when it is less than or equal to 50. Only one branch executes per run. 
+ +First, declare a base task that returns a value: + + + + + + + + + + + + + + + + +Then add two branches that use `ParentCondition` with `skip_if`: + + + + + + + + + + + + + + + + +These two tasks check whether the output of the base task was greater or less than `50`, respectively. Only one of the two will run per workflow execution. + +### Checking if a task was skipped + +Downstream tasks can check whether a parent was skipped using `ctx.was_skipped`: + + + + + + + + + + + + + + + + + +## Or Groups + +DAG tasks can declare multiple conditions that work together to control when and whether a task runs. Conditions of different types (parent conditions, [event conditions](/v1/durable-workflows/events), and [sleep conditions](/v1/durable-workflows/sleep)) can be mixed on a single task using **or groups**. + +An **or group** is a set of conditions combined with an `Or` operator. The group evaluates to `True` if **at least one** of its conditions is satisfied. Multiple or groups on the same task are combined with `AND`, so every group must have at least one satisfied condition for the task to proceed. + +This lets you express arbitrarily complex sets of conditions in [conjunctive normal form](https://en.wikipedia.org/wiki/Conjunctive_normal_form) (CNF). + +### Sleep + Event example + +The most common combination is a sleep condition with an event condition: proceed when an external signal arrives _or_ after a timeout (whichever comes first). This is ideal for [human-in-the-loop](/guides/human-in-the-loop) workflows where you want a deadline. + + + + + + `or_()` wraps a `SleepCondition` and a `UserEventCondition` into a single or group. The task will start as soon as either the sleep expires or the event arrives. + + + + + + `Or()` wraps a `SleepCondition` and a `UserEventCondition` into a single or group. The task will start as soon as either the sleep expires or the event arrives. 
+ + + + + + `hatchet.WithWaitFor` and `hatchet.WithSkipIf` attach conditions to the task. The task will wait for the sleep to expire before starting, and will be skipped if the event arrives. + + + + + + `Hatchet.or_()` wraps a `SleepCondition` and a `UserEventCondition` into a single or group. The task will start as soon as either the sleep expires or the event arrives. + + + + +### Multiple or groups + +For more complex logic, you can declare multiple or groups on a single task. Consider three conditions: + +- **Condition A**: Parent output is greater than 50 +- **Condition B**: Sleep for 30 seconds +- **Condition C**: Receive the `payment:processed` event + +To proceed if (A _or_ B) **and** (A _or_ C), declare two or groups: + +1. Group 1: `A or B` +2. Group 2: `A or C` + +The task will run once both groups are satisfied. If A is true, both groups pass immediately. If A is false, the task needs both B (sleep expires) and C (event arrives). + +### Common combinations + +| Combination | Use case | +| -------------- | ------------------------------------------------------------------------------------ | +| Sleep + Event | Proceed after a timeout _or_ when an external signal arrives (whichever comes first) | +| Parent + Event | Proceed if a parent output meets a threshold _or_ a manual override event arrives | +| Parent + Sleep | Proceed if a parent indicates readiness _or_ after a maximum wait time | +| All three | Complex gates combining data-driven, time-based, and event-driven conditions | + + + diff --git a/frontend/docs/pages/home/cron-runs.mdx b/frontend/docs/pages/v1/cron-runs.mdx similarity index 91% rename from frontend/docs/pages/home/cron-runs.mdx rename to frontend/docs/pages/v1/cron-runs.mdx index 0367e45dc7..4ab20a2762 100644 --- a/frontend/docs/pages/home/cron-runs.mdx +++ b/frontend/docs/pages/v1/cron-runs.mdx @@ -5,7 +5,7 @@ import UniversalTabs from "@/components/UniversalTabs"; # Recurring Runs with Cron -> This example assumes we have a 
[task](./your-first-task.mdx) registered on a running [worker](./workers.mdx). +> This example assumes we have a [task](/v1/tasks) registered on a running [worker](/v1/workers). A [Cron](https://en.wikipedia.org/wiki/Cron) is a time-based job scheduler that allows you to define when a task should be executed automatically on a pre-determined schedule. @@ -17,9 +17,9 @@ Some example use cases for cron-style tasks might include: Hatchet supports cron triggers to run on a schedule defined in a few different ways: -- [Task Definitions](./cron-runs.mdx#defining-a-cron-in-your-task-definition): Define a cron expression in your task definition to trigger the task on a predefined schedule. -- [Dynamic Programmatically](./cron-runs.mdx#programmatically-creating-cron-triggers): Use the Hatchet SDKs to dynamically set the cron schedule of a task. -- [Hatchet Dashboard](./cron-runs.mdx#managing-cron-jobs-in-the-hatchet-dashboard): Manually create cron triggers from the Hatchet Dashboard. +- [Task Definitions](/v1/cron-runs#defining-a-cron-in-your-task-definition): Define a cron expression in your task definition to trigger the task on a predefined schedule. +- [Dynamic Programmatically](/v1/cron-runs#programmatically-creating-cron-triggers): Use the Hatchet SDKs to dynamically set the cron schedule of a task. +- [Hatchet Dashboard](/v1/cron-runs#managing-cron-jobs-in-the-hatchet-dashboard): Manually create cron triggers from the Hatchet Dashboard. The expression is when Hatchet **enqueues** the task, not when the run starts. @@ -99,6 +99,7 @@ Here's an example of creating a a cron to trigger a report for a specific custom @@ -132,6 +133,7 @@ You can delete a cron trigger by passing the cron object or a cron trigger id to @@ -162,6 +164,7 @@ Retrieves a list of all task cron triggers matching the criteria. @@ -200,4 +203,4 @@ When using cron triggers, there are a few considerations to keep in mind: 3. 
**Missed Schedules**: If a scheduled task is missed (e.g., due to system downtime), Hatchet will **not** automatically run the missed instances. It will wait for the next scheduled time to trigger the task. -4. **Overlapping Schedules**: If a task is still running when the next scheduled time arrives, Hatchet will start a new instance of the task or respect the [concurrency](./concurrency.mdx) policy. +4. **Overlapping Schedules**: If a task is still running when the next scheduled time arrives, Hatchet will start a new instance of the task or respect the [concurrency](/v1/concurrency) policy. diff --git a/frontend/docs/pages/v1/developer-experience.mdx b/frontend/docs/pages/v1/developer-experience.mdx new file mode 100644 index 0000000000..29ea373630 --- /dev/null +++ b/frontend/docs/pages/v1/developer-experience.mdx @@ -0,0 +1,40 @@ +# Developer experience + +Hatchet is designed to be practical day-to-day: write workflows in code, run workers locally with a tight feedback loop, and debug production runs with good visibility. + +## Workflows as code + +You define tasks and workflows in your application code, then trigger them with input data. Hatchet handles the operational pieces you’d otherwise build yourself: + +- **Durability** (work isn’t lost on restarts) +- **Retries/timeouts** +- **Concurrency and rate limiting** +- **Visibility into what ran, where, and why** + +## Dashboard (UI) + +The dashboard is where you go to understand “what is happening right now?”: + +- **Runs**: status, inputs/outputs, and execution history +- **Workers**: connected workers and health +- **Workflows**: definitions and recent activity +- **Settings**: tenants, API tokens, configuration + +It’s useful for debugging, operational checks, and ad-hoc triggers. 
+ +## CLI + +The [Hatchet CLI](/reference/cli) is the fastest way to develop and operate Hatchet from your terminal: + +- **`hatchet worker dev`**: run a local worker with hot reload +- **`hatchet trigger`**: trigger a workflow from the command line (handy for smoke tests) +- **`hatchet tui`**: terminal UI for runs/workers/workflows +- **`hatchet profile`**: switch between tenants and environments + +See the [CLI reference](/reference/cli) for installation and the full command set. + +## Coding agents (MCP) + +If you use AI coding tools in your editor, Hatchet’s docs can be used via an [MCP (Model Context Protocol) server](/v1/using-coding-agents). We also publish “agent skills” (short, step-by-step playbooks) so coding agents can run common Hatchet workflows—like starting a worker, triggering a workflow, and debugging a run—without guessing at CLI usage. + +See [Using Coding Agents](/v1/using-coding-agents) for setup. diff --git a/frontend/docs/pages/home/docker.mdx b/frontend/docs/pages/v1/docker.mdx similarity index 99% rename from frontend/docs/pages/home/docker.mdx rename to frontend/docs/pages/v1/docker.mdx index 1702b02184..1ebcefc85e 100644 --- a/frontend/docs/pages/home/docker.mdx +++ b/frontend/docs/pages/v1/docker.mdx @@ -185,6 +185,7 @@ CMD ["node", "dist/worker.js"] Yarn's `--frozen-lockfile` ensures your dependencies match the lock file exactly. 
+ ```dockerfile @@ -212,6 +213,7 @@ CMD ["/app/hatchet-worker"] ``` + {/* TODO-RUBY: verify and test this, possibly all dockerfiles */} ```dockerfile diff --git a/frontend/docs/pages/v1/durable-task-execution.mdx b/frontend/docs/pages/v1/durable-task-execution.mdx new file mode 100644 index 0000000000..5c574e828b --- /dev/null +++ b/frontend/docs/pages/v1/durable-task-execution.mdx @@ -0,0 +1,123 @@ +import { snippets } from "@/lib/generated/snippets"; +import { Snippet } from "@/components/code"; + +import { Callout, Steps } from "nextra/components"; +import DurableWorkflowDiagram from "@/components/DurableWorkflowDiagramWrapper"; + +# Durable Tasks + +Use durable tasks when **you don't know the shape of work ahead of time**. For example, an AI agent that picks its next action based on a model response, a fan-out where N is determined by the input data, or a pipeline that branches and spawns sub-workflows based on intermediate results. In all of these cases, the "graph" of work doesn't exist when the task starts; it emerges at runtime as the task makes decisions and [spawns children](/v1/durable-workflows/child-spawning). + +A durable task is a single long-running function that acts as an **orchestrator**: it spawns child tasks, waits for their results, makes decisions, and spawns more. Hatchet checkpoints its progress so it can recover from crashes, survive long waits, and resume on any worker without re-executing completed work. + + + If you know the full graph of work upfront (every task and dependency is fixed + before execution begins), use a [DAG](/v1/patterns/directed-acyclic-graphs) + instead. You can always [mix both patterns](/v1/patterns/mixing-patterns) in + the same application. + + +## When to Use Durable Tasks + +| Scenario | Why Durable? 
| +| --------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| **Dynamic fan-out** (N unknown) | Spawn children based on runtime data; wait for results without holding a slot. See [Batch Processing](/guides/batch-processing) and [Document Processing](/guides/document-processing). | +| **Agentic workflows** | An agent decides what to do next, spawns subtasks, loops, or stops at runtime. See [AI Agents](/guides/ai-agents/reasoning-loop). | +| **Long waits** (hours/days) | Worker slots are freed during waits; no wasted compute. | +| **Human-in-the-loop** | Wait for approval events without holding resources. See [Human-in-the-Loop](/guides/human-in-the-loop). | +| **Multi-step with inline pauses** | `SleepFor` and `WaitForEvent` let you express complex procedural flows. | +| **Crash-resilient pipelines** | Automatically resume from checkpoints after failures. | + +## How It Works + +A durable task builds the workflow at runtime through **child spawning**. The task function runs, inspects data, and decides what to do next by spawning child tasks. The parent is [evicted](/v1/durable-workflows/task-eviction) while children execute, freeing its worker slot. When children complete, the parent resumes from its checkpoint and continues. + +```mermaid +sequenceDiagram + participant P as Durable Task + participant H as Hatchet + participant W as Workers + + P->>H: Spawn Child A, Child B, Child C...N + H-->>P: Evicted (slot freed) + H->>W: Schedule children across fleet + W->>H: Child results + H->>P: Resume from checkpoint + P->>P: Inspect results, decide next step + P->>H: Spawn more children, sleep, or finish +``` + +This is fundamentally different from a DAG, where every task and dependency is declared before execution begins. 
With durable tasks, the number of children, which branches to take, and whether to loop or stop are all determined by your code at runtime. + + + + + +### Checkpoints + +Each call to `SleepFor`, `WaitForEvent`, `WaitFor`, `Memo`, or `RunChild` creates a checkpoint in the durable event log. These checkpoints record the task's progress. + +### Worker slot is freed during waits + +When a durable task enters a wait (sleep, event, or child result), Hatchet [evicts](/v1/durable-workflows/task-eviction) it from the worker. The slot is immediately available for other tasks. + +### Task resumes from checkpoint + +When the wait completes, Hatchet re-queues the task on any available worker. It replays the event log up to the last checkpoint and resumes execution from there. Completed operations are not re-executed. + + + +## The Durable Context + +Declare a task as durable (using `durable_task` instead of `task`) and it receives a `DurableContext` instead of a normal `Context`. The `DurableContext` extends `Context` with methods for checkpointing and waiting: + +| Method | Purpose | +| ----------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | +| **`SleepFor(duration)`** | Pause for a fixed duration. Respects the original sleep time on restart; if interrupted after 23 of 24 hours, only sleeps 1 more hour. | +| **`WaitForEvent(key, expr)`** | Wait for an external event by key, with optional [CEL filter](https://github.com/google/cel-spec) expression on the payload. | +| **`WaitFor(conditions)`** | General-purpose wait accepting any combination of sleep conditions, event conditions, or or-groups. `SleepFor` and `WaitForEvent` are convenience wrappers around this method. | +| **`Memo(function)`** | Run functions whose outputs are memoized based on the input arguments. 
| +| **`RunChild(task, input)`** | Spawn a child task and wait for its result. The parent is evicted during the wait. | + +## Example Task + + + +Now add tasks to the workflow. The first is a regular task; the second is a durable task that sleeps and waits for an event: + + + + + The `durable_task` decorator gives the function a `DurableContext` instead of + a regular `Context`. This is the only difference in declaration; the task + registers and runs on the same worker as regular tasks. + + +If this task is interrupted at any time, it will continue from where it left off. If the task calls `ctx.aio_sleep_for` for 24 hours and is interrupted after 23 hours, it will only sleep for 1 more hour on restart. + +### Or Groups + +Durable tasks can combine multiple wait conditions using [or groups](/v1/durable-workflows/conditions#or-groups). For example, you could wait for either an event or a sleep (whichever comes first): + + + +## Spawning Child Tasks + +Child spawning is the primary way durable tasks build workflows at runtime. A durable task can spawn any runnable (regular tasks, other durable tasks, or entire DAG workflows), wait for results, and decide what to do next. + +| Child type | Example | +| ---------------- | --------------------------------------------------------------------------------- | +| **Regular task** | Spawn a stateless task for a quick computation or API call. | +| **Durable task** | Spawn another durable task that has its own checkpoints, sleeps, and event waits. | +| **DAG workflow** | Spawn an entire multi-task workflow and wait for its final output. | + +The parent is evicted while children execute, so it consumes no resources. The number and type of children can be determined dynamically based on input, intermediate results, or model outputs. + +See [Child Spawning](/v1/durable-workflows/child-spawning) for patterns and full examples. 
+ + + For an in-depth look at how durable execution works internally, see [this blog + post](https://hatchet.run/blog/durable-execution). + diff --git a/frontend/docs/pages/v1/durable-workflows-overview.mdx b/frontend/docs/pages/v1/durable-workflows-overview.mdx new file mode 100644 index 0000000000..f2e77fd254 --- /dev/null +++ b/frontend/docs/pages/v1/durable-workflows-overview.mdx @@ -0,0 +1,60 @@ +import { Callout, Cards } from "nextra/components"; +import DurableWorkflowComparisonDiagram from "@/components/DurableWorkflowComparisonDiagram"; + +# Durable Workflows + +A **durable workflow** is work whose execution state lives in Hatchet instead of in your process. When you run a durable workflow, the orchestrator owns that state: it records progress, survives your worker crashing or scaling down, and resumes from the last checkpoint so work is not lost or duplicated. + +## Why durable? + +With ordinary tasks, "where we are" in the workflow lives in memory. If the process dies, that state is gone. With durable workflows, execution state is stored in the Hatchet event log. The orchestrator can therefore: + +- **Recover from failures** — replay from the last recorded step on another worker instead of restarting from scratch. +- **Handle long waits** — release the worker slot during "wait 24 hours" or "wait for this event" steps, then resume when the wait completes. +- **Manage distributed state** — keep multi-step, branching, or long-running flows consistent and replayable across workers and restarts. + +Your code describes the steps; Hatchet makes them durable and resumable. + +## Two patterns + +Hatchet supports two patterns for building durable workflows, and you can [mix them](/v1/patterns/mixing-patterns) within the same application. Both are durable — the difference is how you express the work. The key difference is whether you know the **shape of work** ahead of time. + + + +**Durable task execution** — The shape of work is **dynamic**. 
A single long-running function that can pause for time or external signals (`SleepFor`, `WaitForEvent`) and [spawn child tasks](/v1/durable-workflows/child-spawning) at runtime. Use durable tasks when: + +- The work is IO-bound — waiting for time to pass, external events, or human approval +- The number of subtasks is determined at runtime (dynamic fan-out) +- You need procedural control flow — loops, branches, or agent-style reasoning + +**Directed acyclic graphs (DAGs)** — The shape of work is **known upfront**. You declare which tasks run, in what order, and what depends on what. Hatchet handles execution, parallelism, and retries within that fixed structure. Use DAGs when: + +- You have a well-defined pipeline (ETL, multi-step data processing) +- Every task and dependency is known before the workflow starts +- You want the full graph visible in the dashboard for debugging and monitoring + +## Choosing a pattern + +DAGs are **easier to visualize and reason about** — every task, dependency, and data flow is visible as a graph. Durable tasks offer **more flexibility** — they can branch, loop, and spawn children dynamically — but their runtime behavior is harder to predict from the code alone. When in doubt, start with a DAG and reach for a durable task only when you need capabilities a static graph can't express. You can always [mix both patterns](/v1/patterns/mixing-patterns) in the same application. + +## How workflows relate to tasks + +A workflow is a **container of tasks**. Both standalone tasks and workflows are **runnables** — they share the same API (`run`, `run_no_wait`, `schedule`, and the other trigger methods all work identically). + + + + Checkpoints, durable context, and when to use it. + + + Multi-task workflows with dependencies and parallel execution. + + + Combine durable tasks and DAGs within the same workflow. 
+ + diff --git a/frontend/docs/pages/home/environments.mdx b/frontend/docs/pages/v1/environments.mdx similarity index 93% rename from frontend/docs/pages/home/environments.mdx rename to frontend/docs/pages/v1/environments.mdx index f557a73727..654ac72766 100644 --- a/frontend/docs/pages/home/environments.mdx +++ b/frontend/docs/pages/v1/environments.mdx @@ -19,4 +19,4 @@ The easiest way to isolate environments for different developers or teams is to ### Solution 2: Local Hatchet Instance -If you are using Hatchet locally, you can create a local instance of Hatchet to manage your isolated local development environment. Follow instructions [here](../self-hosting/hatchet-lite.mdx) to get started. +If you are using Hatchet locally, you can create a local instance of Hatchet to manage your isolated local development environment. Follow instructions [here](/self-hosting/hatchet-lite) to get started. diff --git a/frontend/docs/pages/v1/error-handling/_meta.js b/frontend/docs/pages/v1/error-handling/_meta.js new file mode 100644 index 0000000000..a895a2a2e8 --- /dev/null +++ b/frontend/docs/pages/v1/error-handling/_meta.js @@ -0,0 +1,7 @@ +export default { + index: { display: "hidden" }, + "retry-policies": { display: "hidden" }, + timeouts: { display: "hidden" }, + cancellation: { display: "hidden" }, + "bulk-retries-and-cancellations": { display: "hidden" }, +}; diff --git a/frontend/docs/pages/v1/error-handling/bulk-retries-and-cancellations.mdx b/frontend/docs/pages/v1/error-handling/bulk-retries-and-cancellations.mdx new file mode 100644 index 0000000000..6d639047d6 --- /dev/null +++ b/frontend/docs/pages/v1/error-handling/bulk-retries-and-cancellations.mdx @@ -0,0 +1,128 @@ +import { Callout, Card, Cards, Steps, Tabs } from "nextra/components"; +import UniversalTabs from "@/components/UniversalTabs"; +import { Snippet } from "@/components/code"; +import { snippets } from "@/lib/generated/snippets"; + +# Bulk Cancellations and Replays + +V1 adds the ability to cancel 
or replay task runs in bulk, which you can now do either in the Hatchet Dashboard or programmatically via the SDKs and the REST API. + +There are two ways of bulk cancelling or replaying tasks in both cases: + +1. You can provide a list of task run ids to cancel or replay, which will cancel or replay all of the tasks in the list. +2. You can provide a list of filters, similar to the list of filters on task runs in the Dashboard, and cancel or replay runs matching those filters. For instance, if you wanted to replay all failed runs of a `SimpleTask` from the past fifteen minutes that had the `foo` field in `additional_metadata` set to `bar`, you could apply those filters and replay all of the matching runs. + +### Bulk Operations by Run Ids + +The first way to bulk cancel or replay runs is by providing a list of run ids. This is the most straightforward way to cancel or replay runs in bulk. + + +{/* TODO V1 DOCS - Add TS */} + + + + In the Python SDK, the mechanics of bulk replaying and bulk cancelling tasks + are exactly the same. The only change would be replacing e.g. + `hatchet.runs.bulk_cancel` with `hatchet.runs.bulk_replay`. + + + First, we'll start by fetching a task via the REST API. + + + + Now that we have a task, we'll get runs for it, so that we can use them to bulk cancel by run id. + + + + And finally, we can cancel the runs in bulk. + + + + + Note that the Python SDK also exposes async versions of each of these methods: + + - `workflows.list` -> `await workflows.aio_list` + - `runs.list` -> `await runs.aio_list` + - `runs.bulk_cancel` -> `await runs.aio_bulk_cancel` + + + + {/* + TODO V1 DOCS + */} + + + + Just like in the Python SDK, the mechanics of bulk replaying and bulk + cancelling tasks are exactly the same. + + + First, we'll start by fetching a task via the REST API. + + + + Now that we have a task, we'll get runs for it, so that we can use them to bulk cancel by run id. + + + + And finally, we can cancel the runs in bulk. 
+ + + + + + + + + + + +### Bulk Operations by Filters + +The second way to bulk cancel or replay runs is by providing a list of filters. This is the most powerful way to cancel or replay runs in bulk, as it allows you to cancel or replay all runs matching a set of arbitrary filters without needing to provide IDs for the runs in advance. + + +{/* TODO V1 DOCS - Add TS */} + + + The example below provides some filters you might use to cancel or replay runs in bulk. Importantly, these filters are very similar to the filters you can use in the Hatchet Dashboard to filter which task runs are displaying. + + + + Running this request will cancel all task runs matching the filters provided. + + + {/* + TODO V1 DOCS + */} + + +The example below provides some filters you might use to cancel or replay runs in bulk. Importantly, these filters are very similar to the filters you can use in the Hatchet Dashboard to filter which task runs are displaying. + + + + Running this request will cancel all task runs matching the filters provided. + + + + + + + +# Manual Retries + +Hatchet provides a manual retry mechanism that allows you to handle failed task instances flexibly from the Hatchet dashboard. + +Navigate to the specific task in the Hatchet dashboard and click on the failed run. From there, you can inspect the details of the run, including the input data and the failure reason for each task. + +To retry a failed task, simply click on the task in the run details view and then click the "Replay" button. This will create a new instance of the task, starting from the failed task, and using the same input data as the original run. + +Manual retries give you full control over when and how to reprocess failed instances. For example, you may choose to wait until an external service is back online before retrying instances that depend on that service, or you may need to deploy a bug fix to your task code before retrying instances that were affected by the bug. 
+ +## A Note on Dead Letter Queues + +A dead letter queue (DLQ) is a messaging concept used to handle messages that cannot be processed successfully. In the context of task management, a DLQ can be used to store failed task instances that require manual intervention or further analysis. + +While Hatchet does not have a built-in dead letter queue feature, the persistence of failed task instances in the dashboard serves a similar purpose. By keeping a record of failed instances, Hatchet allows you to track and manage failures, perform root cause analysis, and take appropriate actions, such as modifying input data or updating your task code before manually retrying the failed instances. + +It's important to note that the term "dead letter queue" is more commonly associated with messaging systems like Apache Kafka or Amazon SQS, where unprocessed messages are automatically moved to a separate queue for manual handling. In Hatchet, the failed instances are not automatically moved to a separate queue but are instead persisted in the dashboard for manual management. diff --git a/frontend/docs/pages/v1/error-handling/cancellation.mdx b/frontend/docs/pages/v1/error-handling/cancellation.mdx new file mode 100644 index 0000000000..7fce04cd7e --- /dev/null +++ b/frontend/docs/pages/v1/error-handling/cancellation.mdx @@ -0,0 +1,68 @@ +import { Callout, Card, Cards, Steps, Tabs } from "nextra/components"; +import UniversalTabs from "@/components/UniversalTabs"; +import { Snippet } from "@/components/code"; +import { snippets } from "@/lib/generated/snippets"; + +# Cancellation in Hatchet Tasks + +Hatchet provides a mechanism for canceling task executions gracefully, allowing you to signal to running tasks that they should stop running. 
Cancellation can be triggered on graceful termination of a worker or automatically through concurrency control strategies like [`CANCEL_IN_PROGRESS`](./concurrency.mdx#cancel_in_progress), which cancels currently running task instances to free up slots for new instances when the concurrency limit is reached. + +When a task is canceled, Hatchet sends a cancellation signal to the task. The task can then check for the cancellation signal and take appropriate action, such as cleaning up resources, aborting network requests, or gracefully terminating their execution. + +## Cancellation Mechanisms + + + + + + + + + + + + + + + + + + + + + +## Cancellation Best Practices + +When working with cancellation in Hatchet tasks, consider the following best practices: + +1. **Graceful Termination**: When a task receives a cancellation signal, aim to terminate its execution gracefully. Clean up any resources, abort pending operations, and perform any necessary cleanup tasks before returning from the task function. + +2. **Cancellation Checks**: Regularly check for cancellation signals within long-running tasks or loops. This allows the task to respond to cancellation in a timely manner and avoid unnecessary processing. + +3. **Asynchronous Operations**: If a task performs asynchronous operations, such as network requests or file I/O, consider passing the cancellation signal to those operations. Many libraries and APIs support cancellation through the `AbortSignal` interface. + +4. **Error Handling**: Handle cancellation errors appropriately. Distinguish between cancellation errors and other types of errors to provide meaningful error messages and take appropriate actions. + +5. **Cancellation Propagation**: If a task invokes other functions or libraries, consider propagating the cancellation signal to those dependencies. This ensures that cancellation is handled consistently throughout the task. 
+ +## Additional Features + +In addition to the methods of cancellation listed here, Hatchet also supports [bulk cancellation](./bulk-retries-and-cancellations.mdx), which allows you to cancel many tasks in bulk using either their IDs or a set of filters, which is often the easiest way to cancel many things at once. + +## Conclusion + +Cancellation is a powerful feature in Hatchet that allows you to gracefully stop task executions when needed. Remember to follow best practices when implementing cancellation in your tasks, such as graceful termination, regular cancellation checks, handling asynchronous operations, proper error handling, and cancellation propagation. + +By incorporating cancellation into your Hatchet tasks and workflows, you can build more resilient and responsive systems that can adapt to changing circumstances and user needs. diff --git a/frontend/docs/pages/v1/error-handling/index.mdx b/frontend/docs/pages/v1/error-handling/index.mdx new file mode 100644 index 0000000000..d268c8ca90 --- /dev/null +++ b/frontend/docs/pages/v1/error-handling/index.mdx @@ -0,0 +1,13 @@ +--- +title: Reliability +description: Retries, timeouts, cancellation, and bulk error handling in Hatchet. +--- + +# Reliability + +Handle failures gracefully with retries, timeouts, cancellation, and bulk operations. 
+ +- [Retry Policies](/v1/retry-policies) — Automatic retry on failure +- [Timeouts](/v1/timeouts) — Set execution time limits +- [Cancellation](/v1/cancellation) — Cancel running tasks +- [Bulk Retries and Cancellations](/v1/bulk-retries-and-cancellations) — Operate on runs in bulk diff --git a/frontend/docs/pages/v1/error-handling/retry-policies.mdx b/frontend/docs/pages/v1/error-handling/retry-policies.mdx new file mode 100644 index 0000000000..ca97234495 --- /dev/null +++ b/frontend/docs/pages/v1/error-handling/retry-policies.mdx @@ -0,0 +1,120 @@ +import { snippets } from "@/lib/generated/snippets"; +import { Snippet } from "@/components/code"; +import { Callout, Card, Cards, Steps, Tabs } from "nextra/components"; +import UniversalTabs from "@/components/UniversalTabs"; + +# Simple Task Retries + +Hatchet provides a simple and effective way to handle failures in your tasks using a retry policy. This feature allows you to specify the number of times a task should be retried if it fails, helping to improve the reliability and resilience of your tasks. + + + Task-level retries can be added to both `Standalone Tasks` and `Workflow + Tasks`. + + +## How it works + +When a task fails (i.e. throws an error or returns a non-zero exit code), Hatchet can automatically retry the task based on the `retries` configuration defined in the task object. Here's how it works: + +1. If a task fails and `retries` is set to a value greater than 0, Hatchet will catch the error and retry the task. +2. The task will be retried up to the specified number of times, with each retry being executed after a short delay to avoid overwhelming the system. +3. If the task succeeds during any of the retries, the task will continue as normal. +4. If the task continues to fail after exhausting all the specified retries, the task will be marked as failed. 
+ +This simple retry mechanism can help to mitigate transient failures, such as network issues or temporary unavailability of external services, without requiring complex error handling logic in your task code. + +## How to use task-level retries + +To enable retries for a task, simply add the `retries` property to the task object in your task definition: + + + + + + + + + + + + + + + + +You can add the `retries` property to any task, and Hatchet will handle the retry logic automatically. + +It's important to note that task-level retries are not suitable for all types of failures. For example, if a task fails due to a programming error or an invalid configuration, retrying the task will likely not resolve the issue. In these cases, you should fix the underlying problem in your code or configuration rather than relying on retries. + +Additionally, if a task interacts with external services or databases, you should ensure that the operation is idempotent (i.e. can be safely repeated without changing the result) before enabling retries. Otherwise, retrying the task could lead to unintended side effects or inconsistencies in your data. + +## Accessing the Retry Count in a Running Task + +If you need to access the current retry count within a task, you can use the `retryCount` method available in the task context: + + + + + + + + + + + + + + + + +## Exponential Backoff + +Hatchet also supports exponential backoff for retries, which can be useful for handling failures in a more resilient manner. Exponential backoff increases the delay between retries exponentially, giving the failing service more time to recover before the next retry. + + + + + + + + + + + + + + + + +## Bypassing Retry logic + +The Hatchet SDKs each expose a `NonRetryable` exception, which allows you to bypass pre-configured retry logic for the task. 
**If your task raises this exception, it will not be retried.** This allows you to circumvent the default retry behavior in instances where you don't want to or cannot safely retry. Some examples in which this might be useful include: + +1. A task that calls an external API which returns a 4XX response code. +2. A task that contains a single non-idempotent operation that can fail but cannot safely be rerun on failure, such as a billing operation. +3. A failure that requires manual intervention to resolve. + + + + + + + + + + + + + + + + +In these cases, even though `retries` is set to a non-zero number (meaning the task would ordinarily retry), Hatchet will not retry. + +## Conclusion + +Hatchet's task-level retry feature is a simple and effective way to handle transient failures in your tasks, improving the reliability and resilience of your tasks. By specifying the number of retries for each task, you can ensure that your tasks can recover from temporary issues without requiring complex error handling logic. + +Remember to use retries judiciously and only for tasks that are idempotent and can safely be repeated. For more advanced retry strategies, such as exponential backoff or circuit breaking, stay tuned for future updates to Hatchet's retry capabilities. diff --git a/frontend/docs/pages/v1/error-handling/timeouts.mdx b/frontend/docs/pages/v1/error-handling/timeouts.mdx new file mode 100644 index 0000000000..eec451a31c --- /dev/null +++ b/frontend/docs/pages/v1/error-handling/timeouts.mdx @@ -0,0 +1,106 @@ +import { snippets } from "@/lib/generated/snippets"; +import { Snippet } from "@/components/code"; +import { Callout, Card, Cards, Steps, Tabs } from "nextra/components"; +import UniversalTabs from "@/components/UniversalTabs"; + +# Timeouts in Hatchet + +Timeouts are an important concept in Hatchet that allow you to control how long a task is allowed to run before it is considered to have failed. 
This is useful for ensuring that your tasks don't run indefinitely and consume unnecessary resources. Timeouts in Hatchet are treated as failures and the task will be [retried](./retry-policies.mdx) if specified. + +There are two types of timeouts in Hatchet: + +1. **Scheduling Timeouts** (Default 5m) - the time a task is allowed to wait in the queue before it is cancelled +2. **Execution Timeouts** (Default 60s) - the time a task is allowed to run before it is considered to have failed + +## Timeout Format + +In Hatchet, timeouts are specified using a string in the format ``, where `` is an integer and `` is one of: + +- `s` for seconds +- `m` for minutes +- `h` for hours + +For example: + +- `10s` means 10 seconds +- `4m` means 4 minutes +- `1h` means 1 hour + +If no unit is specified, seconds are assumed. + + + In the Python SDK, timeouts can also be specified as a `datetime.timedelta` + object. + + +### Task-Level Timeouts + +You can specify execution and scheduling timeouts for a task using the `execution_timeout` and `schedule_timeout` parameters when creating a task. + + + + + + + + + + + + + + + + + + + +In these tasks, both timeouts are specified, meaning: + +1. If the task is not scheduled before the `schedule_timeout` is reached, it will be cancelled. +2. If the task does not complete before the `execution_timeout` is reached (after starting), it will be cancelled. + + + A timed out step does not guarantee that the step will be stopped immediately. + The step will be stopped as soon as the worker is able to stop the step. See + [cancellation](./cancellation.mdx) for more information. + + +## Refreshing Timeouts + +In some cases, you may need to extend the timeout for a step while it is running. This can be done using the `refreshTimeout` method provided by the step context (`ctx`). + +For example: + + + + + + + + + + + + + + + + + +In this example, the step initially would exceed its execution timeout. 
But before it does, we call the `refreshTimeout` method, which extends the timeout and allows it to complete. Importantly, refreshing a timeout is an additive operation - the new timeout is added to the existing timeout. So for instance, if the task originally had a timeout of `30s` and we call `refreshTimeout("15s")`, the new timeout will be `45s`. + +The `refreshTimeout` function can be called multiple times within a step to further extend the timeout as needed. + +## Use Cases + +Timeouts are useful in a variety of scenarios: + +- Ensuring tasks don't run indefinitely and consume unnecessary resources +- Failing tasks early if a critical step takes too long +- Keeping tasks responsive by ensuring individual steps complete in a timely manner +- Preventing infinite loops or hung processes from blocking the entire system + +For example, if you have a task that makes an external API call, you may want to set a timeout to ensure the task fails quickly if the API is unresponsive, rather than waiting indefinitely. + +By carefully considering timeouts for your tasks and steps, you can build more resilient and responsive systems with Hatchet. diff --git a/frontend/docs/pages/v1/events.mdx b/frontend/docs/pages/v1/events.mdx new file mode 100644 index 0000000000..bd513e5d98 --- /dev/null +++ b/frontend/docs/pages/v1/events.mdx @@ -0,0 +1,201 @@ +import { snippets } from "@/lib/generated/snippets"; +import { Snippet } from "@/components/code"; +import { Callout, Tabs } from "nextra/components"; +import UniversalTabs from "@/components/UniversalTabs"; +import HumanInLoopDiagram from "@/components/HumanInLoopDiagramWrapper"; + +# Events + +Tasks can pause until an external event arrives before continuing. This is the foundation for [human-in-the-loop](/guides/human-in-the-loop) workflows, webhook-driven pipelines, and any flow that depends on signals from outside the task. + + + +Both durable tasks and DAGs support waiting for events. 
Durable tasks call `WaitForEvent` dynamically at runtime, while DAGs declare event conditions upfront on the task definition. + +Events are delivered by [pushing events](/v1/external-events/pushing-events) into Hatchet using the event client. The event key you push must match the key your task is waiting for. + + + + +## Wait For Events + +Wait For Events lets a durable task pause until an external event arrives. Even if the task is interrupted and requeued while waiting, the event will still be processed. When it resumes, it reads the event from the durable event log and continues. + + + Waiting for an event puts the task into an [evictable + state](/v1/durable-workflows/task-eviction), the worker slot is freed and the + task is re-queued when the event arrives. + + +### Declaring a wait for event + +Wait For Event is declared using the context method `WaitFor` (or utility method `WaitForEvent`) on the `DurableContext` object. + + + + + + + + + + + + + + + + + + + + + + +### Event filters + +Events can be filtered using [CEL](https://github.com/google/cel-spec) expressions. For example, to only receive `user:update` events for a specific user: + + + + + + + + + + + + + + + + + + + + + + +### Pushing events + +For a waiting task to resume, something must [push an event](/v1/external-events/pushing-events) into Hatchet with a matching key. You can do this from any service that has access to the Hatchet client. + + + + + + + + + + + + + + + + +When the pushed event's key matches what a durable task is waiting for (and passes any CEL filter), the task is re-queued and resumes from its checkpoint. + + + + +## Event Conditions + +Event conditions let a DAG task react to external events. A task can wait for an event before running, be skipped when an event arrives, or be cancelled by an event. + +Unlike durable tasks (where `WaitForEvent` is called dynamically at runtime), DAG event conditions are declared upfront on the task definition. 
+ +### Usage modes + +Event conditions can be used with three operators: + +- **`wait_for`** — the task waits for the event before starting. +- **`skip_if`** — the task is skipped if the event arrives. +- **`cancel_if`** — the task is cancelled if the event arrives. + + + A task cancelled by `cancel_if` behaves like any other cancellation in Hatchet + — downstream tasks will be cancelled as well. + + +### Waiting for an event + +Declare a task with a `wait_for` event condition. The task will not start until the specified event is pushed into Hatchet. + + + + + + + + + + + + + + + + +### Skipping on an event + +Declare a task with a `skip_if` event condition. The task will be skipped if the event arrives before the task starts. + + + + + + + + + + + + + + + + +### Event filters + +Events can be filtered using [CEL](https://github.com/google/cel-spec) expressions. The CEL expression is evaluated against the event payload, and the condition only matches if the expression returns `true`. This works identically to event filters in durable tasks. + +### Pushing events + +For a waiting task to proceed, something must [push an event](/v1/external-events/pushing-events) into Hatchet with a matching key. You can do this from any service that has access to the Hatchet client. + + + + + + + + + + + + + + + + +### Combining with other conditions + +Event conditions can be combined with parent and sleep conditions using or groups. For example, you can wait for _either_ an event or a timeout (whichever comes first). See [Conditions & Branching](/v1/durable-workflows/conditions) for details. 
+ + + diff --git a/frontend/docs/pages/v1/external-events/_meta.js b/frontend/docs/pages/v1/external-events/_meta.js new file mode 100644 index 0000000000..ce3afa6649 --- /dev/null +++ b/frontend/docs/pages/v1/external-events/_meta.js @@ -0,0 +1,6 @@ +export default { + index: { display: "hidden" }, + "pushing-events": "Pushing Events", + "run-on-event": "Event Trigger", + "event-filters": "Event Filters", +}; diff --git a/frontend/docs/pages/home/run-on-event.mdx b/frontend/docs/pages/v1/external-events/event-filters.mdx similarity index 55% rename from frontend/docs/pages/home/run-on-event.mdx rename to frontend/docs/pages/v1/external-events/event-filters.mdx index 6c21f01821..efd5df601a 100644 --- a/frontend/docs/pages/home/run-on-event.mdx +++ b/frontend/docs/pages/v1/external-events/event-filters.mdx @@ -3,80 +3,15 @@ import { Snippet } from "@/components/code"; import { Callout, Card, Cards, Steps, Tabs } from "nextra/components"; import UniversalTabs from "@/components/UniversalTabs"; -# Run on Event +# Event Filters -> This example assumes we have a [task](./your-first-task.mdx) registered on a running [worker](./workers.mdx). +Events can be _filtered_ in Hatchet, which allows you to push events to Hatchet and only trigger task runs from them in certain cases. **If you enable filters on a workflow, your workflow will be triggered once for each matching filter on any incoming event with a matching scope** (more on scopes below). -Run-on-event allows you to trigger one or more tasks when a specific event occurs. This is useful when you need to execute a task in response to an ephemeral event where the result is not important. A few common use cases for event-triggered task runs are: - -1. Running a task when an ephemeral event is received, such as a webhook or a message from a queue. -2. When you want to run multiple independent tasks in response to a single event. 
For instance, if you wanted to run a `send_welcome_email` task, and you also wanted to run a `grant_new_user_credits` task, and a `reward_referral` task, all triggered by the signup. In this case, you might declare all three of those tasks with an event trigger for `user:signup`, and then have them all kick off when that event happens. - - - Event triggers evaluate tasks to run at the time of the event. If an event is - received before the task is registered, the task will not be run. - - -## Declaring Event Triggers - -To run a task on an event, you need to declare the event that will trigger the task. This is done by declaring the `on_events` property in the task declaration. - - - - - - - - - - - - - - - - - - Note: Multiple tasks can be triggered by the same event. - - - - As of engine version 0.65.0, Hatchet supports wildcard event triggers using - the `*` wildcard pattern. For example, you could register `subscription:*` as - your event key, which would match incoming events like `subcription:create`, - `subscription:renew`, `subscription:cancel`, and so on. - - -### Pushing an Event - -You can push an event to the event queue by calling the `push` method on the Hatchet event client and providing the event name and payload. - - - - - - - - - - - - - - - - -## Event Filtering - -Events can also be _filtered_ in Hatchet, which allows you to push events to Hatchet and only trigger task runs from them in certain cases. **If you enable filters on a workflow, your workflow will be triggered once for each matching filter on any incoming event with a matching scope** (more on scopes below). - -### Basic Usage +## Basic Usage There are two ways to create filters in Hatchet. -#### Default filters on the workflow +### Default filters on the workflow The simplest way to create a filter is to register it declaratively with your workflow when it's created. 
For example: @@ -97,11 +32,11 @@ The simplest way to create a filter is to register it declaratively with your wo In each of these cases, we register a filter with the workflow. Note that these "declarative" filters are overwritten each time your workflow is updated, so the ids associated with them will not be stable over time. This allows you to modify a filter in-place or remove a filter, and not need to manually delete it over the API. -#### Filters feature client +### Filters feature client You also can create event filters by using the `filters` clients on the SDKs: - + @@ -126,7 +61,7 @@ You also can create event filters by using the `filters` clients on the SDKs: Then, push an event that uses the filter to determine whether or not to run. For instance, this run will be skipped, since the payload does not match the expression: - + @@ -143,7 +78,7 @@ Then, push an event that uses the filter to determine whether or not to run. For But this one will be triggered since the payload _does_ match the expression: - + @@ -163,11 +98,11 @@ But this one will be triggered since the payload _does_ match the expression: filter to determine which tasks to trigger. -### Accessing the filter payload +## Accessing the filter payload You can access the filter payload by using the `Context` in the task that was triggered by your event: - + @@ -184,7 +119,7 @@ You can access the filter payload by using the `Context` in the task that was tr -### Advanced Usage +## Advanced Usage In addition to referencing `input` in the expression (which corresponds to the _event_ payload), you can also reference the following fields: diff --git a/frontend/docs/pages/v1/external-events/index.mdx b/frontend/docs/pages/v1/external-events/index.mdx new file mode 100644 index 0000000000..360c739944 --- /dev/null +++ b/frontend/docs/pages/v1/external-events/index.mdx @@ -0,0 +1,12 @@ +--- +title: External Events +description: Push events and trigger tasks from external sources. 
+--- + +# External Events + +Integrate external event sources to trigger and coordinate tasks. + +- [Pushing Events](/v1/external-events/pushing-events) — Send events to Hatchet +- [Event Trigger](/v1/external-events/run-on-event) — Trigger tasks on events +- [Event Filters](/v1/external-events/event-filters) — Filter which events trigger runs diff --git a/frontend/docs/pages/v1/external-events/pushing-events.mdx b/frontend/docs/pages/v1/external-events/pushing-events.mdx new file mode 100644 index 0000000000..8ac88441cf --- /dev/null +++ b/frontend/docs/pages/v1/external-events/pushing-events.mdx @@ -0,0 +1,28 @@ +import { snippets } from "@/lib/generated/snippets"; +import { Snippet } from "@/components/code"; +import { Callout, Card, Cards, Steps, Tabs } from "nextra/components"; +import UniversalTabs from "@/components/UniversalTabs"; + +# Pushing Events + +You can push an event to Hatchet by calling the `push` method on the Hatchet event client and providing the event name and payload. Any tasks that have registered an [event trigger](/v1/external-events/run-on-event) for that event key will be run. + + + + + + + + + + + + + + + + + + Event triggers evaluate tasks to run at the time of the event. If an event is + received before the task is registered, the task will not be run. + diff --git a/frontend/docs/pages/v1/external-events/run-on-event.mdx b/frontend/docs/pages/v1/external-events/run-on-event.mdx new file mode 100644 index 0000000000..3605e4955b --- /dev/null +++ b/frontend/docs/pages/v1/external-events/run-on-event.mdx @@ -0,0 +1,50 @@ +import { snippets } from "@/lib/generated/snippets"; +import { Snippet } from "@/components/code"; +import { Callout, Card, Cards, Steps, Tabs } from "nextra/components"; +import UniversalTabs from "@/components/UniversalTabs"; + +# Event Trigger + +> This example assumes we have a [task](/v1/tasks) registered on a running [worker](/v1/workers). 
+ +Run-on-event allows you to trigger one or more tasks when a specific event occurs. This is useful when you need to execute a task in response to an ephemeral event where the result is not important. See [Event-Driven Systems](/guides/event-driven) for a detailed guide. A few common use cases for event-triggered task runs are: + +1. Running a task when an ephemeral event is received, such as a webhook or a message from a queue. +2. When you want to run multiple independent tasks in response to a single event. For instance, if you wanted to run a `send_welcome_email` task, and you also wanted to run a `grant_new_user_credits` task, and a `reward_referral` task, all triggered by the signup. In this case, you might declare all three of those tasks with an event trigger for `user:signup`, and then have them all kick off when that event happens. + + + Event triggers evaluate tasks to run at the time of the event. If an event is + received before the task is registered, the task will not be run. + + +## Declaring Event Triggers + +To run a task on an event, you need to declare the event that will trigger the task. This is done by declaring the `on_events` property in the task declaration. + + + + + + + + + + + + + + + + + + Note: Multiple tasks can be triggered by the same event. + + + + As of engine version 0.65.0, Hatchet supports wildcard event triggers using + the `*` wildcard pattern. For example, you could register `subscription:*` as + your event key, which would match incoming events like `subcription:create`, + `subscription:renew`, `subscription:cancel`, and so on. 
+ diff --git a/frontend/docs/pages/v1/flow-control/_meta.js b/frontend/docs/pages/v1/flow-control/_meta.js new file mode 100644 index 0000000000..d328e7fd4e --- /dev/null +++ b/frontend/docs/pages/v1/flow-control/_meta.js @@ -0,0 +1,6 @@ +export default { + index: { display: "hidden" }, + concurrency: { display: "hidden" }, + "rate-limits": { display: "hidden" }, + priority: { display: "hidden" }, +}; diff --git a/frontend/docs/pages/v1/flow-control/concurrency.mdx b/frontend/docs/pages/v1/flow-control/concurrency.mdx new file mode 100644 index 0000000000..7383363554 --- /dev/null +++ b/frontend/docs/pages/v1/flow-control/concurrency.mdx @@ -0,0 +1,160 @@ +import { Callout, Card, Cards, Steps, Tabs } from "nextra/components"; +import UniversalTabs from "@/components/UniversalTabs"; +import { Snippet } from "@/components/code"; +import { snippets } from "@/lib/generated/snippets"; + +# Concurrency Control in Hatchet Tasks + +Hatchet provides powerful concurrency control features to help you manage the execution of your tasks. This is particularly useful when you have tasks that may be triggered frequently or have long-running steps, and you want to limit the number of concurrent executions to prevent overloading your system, ensure fairness, or avoid race conditions. + + + Concurrency strategies can be added to both `Tasks` and `Workflows`. + + +### Why use concurrency control? + +There are several reasons why you might want to use concurrency control in your Hatchet tasks: + +1. **Fairness**: When you have multiple clients or users triggering tasks, concurrency control can help ensure fair access to resources. By limiting the number of concurrent runs per client or user, you can prevent a single client from monopolizing the system and ensure that all clients get a fair share of the available resources. + +2. 
**Resource management**: If your task steps are resource-intensive (e.g., they make external API calls or perform heavy computations), running too many instances concurrently can overload your system. By limiting concurrency, you can ensure your system remains stable and responsive. + +3. **Avoiding race conditions**: If your task steps modify shared resources, running multiple instances concurrently can lead to race conditions and inconsistent data. Concurrency control helps you avoid these issues by ensuring only a limited number of instances run at a time. + +4. **Compliance with external service limits**: If your task steps interact with external services that have rate limits, concurrency control can help you stay within those limits and avoid being throttled or blocked. + +5. **Spike Protection**: When you have tasks that are triggered by external events, such as webhooks or user actions, you may experience spikes in traffic that can overwhelm your system. Concurrency control can help you manage these spikes by limiting the number of concurrent runs and queuing new runs until resources become available. + +### Available Strategies: + +- [`GROUP_ROUND_ROBIN`](#group-round-robin): Distribute task instances across available slots in a round-robin fashion based on the `key` function. +- [`CANCEL_IN_PROGRESS`](#cancel-in-progress): Cancel the currently running task instances for the same concurrency key to free up slots for the new instance. +- [`CANCEL_NEWEST`](#cancel-newest): Cancel the newest task instance for the same concurrency key to free up slots for the new instance. + +> We're always open to adding more strategies to fit your needs. Join our [discord](https://hatchet.run/discord) to let us know. 
+ +### Setting concurrency on workers + +In addition to setting concurrency limits at the task level, you can also control concurrency at the worker level by passing the `slots` option when creating a new `Worker` instance: + + + + + + + + + + + + + + + + +This example will only let 1 run in each group run at a given time to fairly distribute the load across the workers. + +## Group Round Robin + +### How it works + +When a new task instance is triggered, the `GROUP_ROUND_ROBIN` strategy will: + +1. Determine the group that the instance belongs to based on the `key` function defined in the task's concurrency configuration. +2. Check if there are any available slots for the instance's group based on the `slots` limit of available workers. +3. If a slot is available, the new task instance starts executing immediately. +4. If no slots are available, the new task instance is added to a queue for its group. +5. When a running task instance completes and a slot becomes available for a group, the next queued instance for that group (in round-robin order) is dequeued and starts executing. + +This strategy ensures that task instances are processed fairly across different groups, preventing any one group from monopolizing the available resources. It also helps to reduce latency for instances within each group, as they are processed in a round-robin fashion rather than strictly in the order they were triggered. + +### When to use `GROUP_ROUND_ROBIN` + +The `GROUP_ROUND_ROBIN` strategy is particularly useful in scenarios where: + +- You have multiple clients or users triggering task instances, and you want to ensure fair resource allocation among them. +- You want to process instances within each group in a round-robin fashion to minimize latency and ensure that no single instance within a group is starved for resources. +- You have long-running task instances and want to avoid one group's instances monopolizing the available slots. 
+
+Keep in mind that the `GROUP_ROUND_ROBIN` strategy may not be suitable for all use cases, especially those that require strict ordering or prioritization of the most recent events.
+
+## Cancel In Progress
+
+### How it works
+
+When a new task instance is triggered, the `CANCEL_IN_PROGRESS` strategy will:
+
+1. Determine the group that the instance belongs to based on the `key` function defined in the task's concurrency configuration.
+2. Check if there are any available slots for the instance's group based on the `slots` limit of available workers.
+3. If a slot is available, the new task instance starts executing immediately.
+4. If there are no available slots, currently running task instances for the same concurrency key are cancelled to free up slots for the new instance.
+5. The new task instance starts executing immediately.
+
+### When to use `CANCEL_IN_PROGRESS`
+
+The `CANCEL_IN_PROGRESS` strategy is particularly useful in scenarios where:
+
+- You have long-running task instances that may become stale or irrelevant if newer instances are triggered.
+- You want to prioritize processing the most recent data or events, even if it means canceling older task instances.
+- You have resource-intensive tasks where it's more efficient to cancel an in-progress instance and start a new one than to wait for the old instance to complete.
+- Your user UI allows for multiple inputs, but only the most recent is relevant (i.e. chat messages, form submissions, etc.).
+
+## Cancel Newest
+
+### How it works
+
+The `CANCEL_NEWEST` strategy is similar to `CANCEL_IN_PROGRESS`, but it cancels the newly enqueued run instead of the oldest.
+
+### When to use `CANCEL_NEWEST`
+
+The `CANCEL_NEWEST` strategy is particularly useful in scenarios where:
+
+- You want to allow in-progress runs to complete before starting new work.
+- You have long-running task instances and want to avoid one group's instances monopolizing the available slots.
+ +## Multiple concurrency strategies + +You can also combine multiple concurrency strategies to create a more complex concurrency control system. For example, you can use one group key to represent a specific team, and another group to represent a specific resource in that team, giving you more control over the rate at which tasks are executed. + + + + + + + + + + + + + + + diff --git a/frontend/docs/pages/v1/flow-control/index.mdx b/frontend/docs/pages/v1/flow-control/index.mdx new file mode 100644 index 0000000000..2a429e64ea --- /dev/null +++ b/frontend/docs/pages/v1/flow-control/index.mdx @@ -0,0 +1,12 @@ +--- +title: Flow Control +description: Concurrency, rate limiting, and priority controls for Hatchet tasks. +--- + +# Flow Control + +Control how tasks are scheduled and executed with concurrency limits, rate limits, and priority. + +- [Concurrency](/v1/concurrency) — Limit concurrent task execution +- [Rate Limits](/v1/rate-limits) — Throttle task throughput +- [Priority](/v1/priority) — Prioritize tasks in the queue diff --git a/frontend/docs/pages/v1/flow-control/priority.mdx b/frontend/docs/pages/v1/flow-control/priority.mdx new file mode 100644 index 0000000000..190cde8e34 --- /dev/null +++ b/frontend/docs/pages/v1/flow-control/priority.mdx @@ -0,0 +1,101 @@ +import { snippets } from "@/lib/generated/snippets"; +import { Snippet } from "@/components/code"; +import { Callout, Card, Cards, Steps, Tabs } from "nextra/components"; +import UniversalTabs from "@/components/UniversalTabs"; + +# Assigning priority to tasks in Hatchet + +Hatchet allows you to assign different `priority` values to your tasks depending on how soon you want them to run. `priority` can be set to either `1`, `2`, or `3`, (`low`, `medium`, and `high`, respectively) with relatively higher values resulting in that task being picked up before others of the same type. 
**By default, runs in Hatchet have a priority of 1 (low) unless otherwise specified.** + + + +Priority only affects multiple runs of a _single_ workflow. If you have two different workflows (A and B) and set A to globally have a priority of 3, and B to globally have a priority of 1, this does _not_ guarantee that if there is one task from A and one from B in the queue, that A's task will be run first. + +However, _within_ A, if you enqueue one task with priority 3 and one with priority 1, the priority 3 task will be run first. + + + +A couple of common use cases for assigning priorities are things like: + +1. Having high-priority (e.g. paying, new, etc.) customers be prioritized over lower-priority ones, allowing them to get faster turnaround times on their tasks. +2. Having tasks triggered via your API run with higher priority than the same tasks triggered by a cron. + +## Setting priority for a task or workflow + +There are a few different ways to set priorities for tasks or workflows in Hatchet. + +### Workflow-level default priority + +First, you can set a default priority at the workflow level: + + + + + + + + + + + + + + + + + + + + + +This will assign the same default priority to all runs of this workflow (and all of the workflow's corresponding tasks), but will have no effect without also setting run-level priorities, since every run will use the same default. + +### Priority-on-trigger + +When you trigger a run, you can set the priority of the triggered run to override its default priority. + + + + + + + + + + + + + + + + + + + + + +Similarly, you can also assign a priority to scheduled and cron workflows. + + + + + + + + + + + + + + + + + + + + + +In these cases, the priority set on the trigger will override the default priority, so these runs will be processed ahead of lower-priority ones. 
diff --git a/frontend/docs/pages/v1/flow-control/rate-limits.mdx b/frontend/docs/pages/v1/flow-control/rate-limits.mdx new file mode 100644 index 0000000000..685ea1ee45 --- /dev/null +++ b/frontend/docs/pages/v1/flow-control/rate-limits.mdx @@ -0,0 +1,140 @@ +import { snippets } from "@/lib/generated/snippets"; +import { Snippet } from "@/components/code"; +import { Callout, Card, Cards, Steps, Tabs } from "nextra/components"; +import UniversalTabs from "@/components/UniversalTabs"; + +# Rate Limiting Step Runs in Hatchet + +Hatchet allows you to enforce rate limits on task runs, enabling you to control the rate at which your service runs consume resources, such as external API calls, database queries, or other services. By defining rate limits, you can prevent task runs from exceeding a certain number of requests per time window (e.g., per second, minute, or hour), ensuring efficient resource utilization and avoiding overloading external services. + +The state of active rate limits can be viewed in the dashboard in the `Rate Limit` resource tab. + +## Dynamic vs Static Rate Limits + +Hatchet offers two patterns for Rate Limiting task runs: + +1. [Dynamic Rate Limits](#dynamic-rate-limits): Allows for complex rate limiting scenarios, such as per-user limits, by using `input` or `additional_metadata` keys to upsert a limit at runtime. +2. [Static Rate Limits](#static-rate-limits): Allows for simple rate limiting for resources known prior to runtime (e.g., external APIs). + +## Dynamic Rate Limits + +Dynamic rate limits are ideal for complex scenarios where rate limits need to be partitioned by resources that are only known at runtime. + +This pattern is especially useful for: + +1. Rate limiting individual users or tenants +2. Implementing variable rate limits based on subscription tiers or user roles +3. Dynamically adjusting limits based on real-time system load or other factors + +### How It Works + +1. 
Define the dynamic rate limit key with a CEL (Common Expression Language) Expression on the key, referencing either `input` or `additional_metadata`. +2. Provide this key as part of the workflow trigger or event `input` or `additional_metadata` at runtime. +3. Hatchet will create or update the rate limit based on the provided key and enforce it for the step run. + + + Note: Dynamic keys are a shared resource, this means the same rendered cel on + multiple steps will be treated as one global rate limit. + + +### Declaring and Consuming Dynamic Rate Limits + + + +> Note: `dynamic_key` must be a CEL expression. `units` and `limits` can be either an integer or a CEL expression. + +We can add one or more rate limits to a task by adding the `rate_limits` configuration to the task definition. + + + + + +> Note: `dynamicKey` must be a CEL expression. `units` and `limit` can be either an integer or a CEL expression. + +We can add one or more rate limits to a task by adding the `rate_limits` configuration to the task definition. + + + + + +> Note: Go requires both a key and KeyExpr be set and the LimitValueExpr must be a CEL. + + + + + + + + + +## Static Rate Limits + +Static Rate Limits (formerly known as Global Rate Limits) are defined as part of your worker startup lifecycle prior to runtime. This model provides a single "source of truth" for pre-defined resources such as: + +1. External API resources that have a rate limit across all users or tenants +2. Database connection pools with a maximum number of concurrent connections +3. Shared computing resources with limited capacity + +### How It Works + +1. Declare static rate limits using the `put_rate_limit` method in the `Admin` client before starting your worker. +2. Specify the units of consumption for a specific rate limit key in each step definition using the `rate_limits` configuration. +3. Hatchet enforces the defined rate limits by tracking the number of units consumed by each step run across all workflow runs. 
+ +If a step run exceeds the rate limit, Hatchet re-queues the step run until the rate limit is no longer exceeded. + +### Declaring Static Limits + +Define the static rate limits that can be consumed by any step run across all workflow runs using the `put_rate_limit` method in the `Admin` client within your code. + + + + + + + + + +{" "} + + + + + + + + + + + + + + + +### Consuming Static Rate Limits + +With your rate limit key defined, specify the units of consumption for a specific key in each step definition by adding the `rate_limits` configuration to your step definition in your workflow. + + + + + + + + + + + + + + + + + + + + + +### Limiting Workflow Runs + +To rate limit an entire workflow run, it's recommended to specify the rate limit configuration on the entry step (i.e., the first step in the workflow). This will gate the execution of all downstream steps in the workflow. diff --git a/frontend/docs/pages/home/index.mdx b/frontend/docs/pages/v1/index.mdx similarity index 56% rename from frontend/docs/pages/home/index.mdx rename to frontend/docs/pages/v1/index.mdx index bfe59d124b..a5baa0cd7e 100644 --- a/frontend/docs/pages/home/index.mdx +++ b/frontend/docs/pages/v1/index.mdx @@ -1,34 +1,47 @@ -import { Tabs, Callout } from "nextra/components"; +--- +asIndexPage: true +--- + +import { Callout } from "nextra/components"; +import LanguageSwitcher from "@/components/LanguageSwitcher"; # What is Hatchet? Hatchet is a modern orchestration platform that helps engineering teams build low-latency and high-throughput data ingestion and agentic AI pipelines. -You write simple functions, called [tasks](./home/your-first-task), in Python, Typescript, and Go and run them on [workers](./home/workers) in your own infrastructure. You can compose these tasks into [parent/child relationships](./home/child-spawning) or predefined as [Directed Acyclic Graphs (DAGs)](./home/dags) to build more complex pipelines, which we call [workflows](./home/orchestration). 
All tasks and workflows are **defined as code**, making them easy to version, test, and deploy. +You write functions in Python, Typescript, Go, or Ruby and let Hatchet handle scheduling, retries, fault tolerance, and observability. + + + +The core mental model has three parts: -Hatchet handles scheduling, complex assignment, fault tolerance, and observability so you can focus on building your application as you scale. +- **[Tasks](/v1/tasks)** — the fundamental unit of work. A task wraps a single function and gives Hatchet everything it needs to schedule, execute, and observe it. +- **[Workers](/v1/workers)** — long-running processes in your infrastructure that pick up and execute tasks. +- **[Durable Workflows](/v1/durable-workflows)** — compose multiple tasks into durable pipelines with dependencies, retries, and checkpointing. -## Use-Cases +All tasks and workflows are **defined as code**, making them easy to version, test, and deploy. + +## Use cases While Hatchet is a general-purpose orchestration platform, it's particularly well-suited for: -- **Real-time data processing pipelines**: for example, data ingestion which is crucial for keeping LLM contexts up-to-date, or ETL pipelines that require fast execution and high throughput. -- **AI agents**: a core number of Hatchet's features, like [webhooks](./home/webhooks), [child spawning](./home/child-spawning), and [dynamic workflows](./home/child-spawning) are designed to support AI agents. -- **Event-driven systems**: Hatchet's [eventing features](./home/run-on-event) allow you to build event-driven architectures without requiring additional infrastructure. +- **Real-time data processing** — data ingestion for keeping LLM contexts up-to-date, ETL pipelines that require fast execution and high throughput. +- **AI agents** — features like [webhooks](/v1/webhooks), [child spawning](/v1/durable-workflows/child-spawning), and dynamic workflows are designed to support agentic patterns. 
+- **Event-driven systems** — Hatchet's [eventing features](/v1/external-events/run-on-event) let you build event-driven architectures without additional infrastructure.
 
 ## Why Hatchet?
 
-⚡️ **Low-Latency For Real-Time Workloads** - Sub-25ms task dispatch for hot workers with thousands of concurrent tasks. Smart assignment rules handle [rate-limits](./home/rate-limits), [fairness](./home/concurrency), and [priorities](./home/priority) without complex configuration.
+⚡️ **Low-Latency For Real-Time Workloads** - Sub-25ms task dispatch for hot workers with thousands of concurrent tasks. Smart assignment rules handle [rate-limits](/v1/rate-limits), [fairness](/v1/concurrency), and [priorities](/v1/priority) without complex configuration.
 
-🪨 **Durability for Long Running Jobs** - Every task invocation is durably logged to PostgreSQL. With [durable execution](./home/durable-execution), when jobs fail your workflow will resume exactly where you left off — no lost work, no duplicate LLM calls, no engineer headaches.
+🪨 **Durability for Long Running Jobs** - Every task invocation is durably logged to PostgreSQL. With [durable execution](/v1/patterns/durable-task-execution), when jobs fail your workflow will resume exactly where you left off — no lost work, no duplicate LLM calls, no engineer headaches.
 
 🧘‍♂️ **Zen Developer Experience** - Hatchet SDKs (Python, Typescript, and Go) are built with modern tooling and are designed to be easy to use. Hatchet has built-in observability and debugging tools for things like replays, logs, and alerts.
 
 If you plan on self-hosting or have requirements for an on-premise deployment, there are some additional considerations:
 
-🐘 **Minimal Infra Dependencies** - Hatchet is built on top of PostgreSQL and for simple workloads, [its all you need](./self-hosting/hatchet-lite.mdx).
+🐘 **Minimal Infra Dependencies** - Hatchet is built on top of PostgreSQL and for simple workloads, [it's all you need](/self-hosting/hatchet-lite).
-⬆️ **Fully Featured Open Source** - Hatchet is 100% MIT licensed, so you can run the same application code against [Hatchet Cloud](https://cloud.onhatchet.run) to get started quickly or [self-host](./self-hosting.mdx) when you need more control. +⬆️ **Fully Featured Open Source** - Hatchet is 100% MIT licensed, so you can run the same application code against [Hatchet Cloud](https://cloud.onhatchet.run) to get started quickly or [self-host](/self-hosting) when you need more control. ## Hatchet vs. Alternatives @@ -63,25 +76,8 @@ Hatchet has been battle-tested in production environments, processing billions o > "Hatchet enables Aevy to process up to 50,000 documents in under an hour through optimized parallel execution, compared to nearly a week with our previous setup." > — Ymir, CTO @ Aevy -## Quick Starts - -We have a number of quick start tutorials for getting up and running quickly with Hatchet: - -- [Hatchet Cloud Quickstart](./home/hatchet-cloud-quickstart.mdx) -- [Hatchet Self-Hosted Quickstarts](./self-hosting.mdx) - -We also have guides for getting started with the Hatchet SDKs: - -- [Python SDK Quickstart](https://github.com/hatchet-dev/hatchet-python-quickstart) -- [Typescript SDK Quickstart](https://github.com/hatchet-dev/hatchet-typescript-quickstart) -- [Go SDK Quickstart](https://github.com/hatchet-dev/hatchet-go-quickstart) - -## Learn More - -Ready to dive deeper? Explore these additional resources: - -**[Architecture](./home/architecture.mdx)** - Learn how Hatchet is built and designed for scale. +## Ready to dive deeper? -**[Guarantees & Tradeoffs](./home/guarantees-and-tradeoffs.mdx)** - Understand Hatchet's guarantees, limitations, and when to use it. +Check out the **[Architecture & Guarantees](/v1/architecture-and-guarantees)** page to learn how Hatchet is built, its guarantees, and when to use it. -Or get started with the **[Hatchet Cloud Quickstart](./home/hatchet-cloud-quickstart.mdx)** or **[self-hosting](./self-hosting.mdx)**. 
+Or get started with the **[Hatchet Cloud Quickstart](/v1/quickstart)** or **[self-hosting](/self-hosting)**. diff --git a/frontend/docs/pages/home/inter-service-triggering.mdx b/frontend/docs/pages/v1/inter-service-triggering.mdx similarity index 100% rename from frontend/docs/pages/home/inter-service-triggering.mdx rename to frontend/docs/pages/v1/inter-service-triggering.mdx diff --git a/frontend/docs/pages/home/logging.mdx b/frontend/docs/pages/v1/logging.mdx similarity index 100% rename from frontend/docs/pages/home/logging.mdx rename to frontend/docs/pages/v1/logging.mdx diff --git a/frontend/docs/pages/home/middleware.mdx b/frontend/docs/pages/v1/middleware.mdx similarity index 100% rename from frontend/docs/pages/home/middleware.mdx rename to frontend/docs/pages/v1/middleware.mdx diff --git a/frontend/docs/pages/v1/migrating/_meta.js b/frontend/docs/pages/v1/migrating/_meta.js new file mode 100644 index 0000000000..4e9d1fe3d1 --- /dev/null +++ b/frontend/docs/pages/v1/migrating/_meta.js @@ -0,0 +1,7 @@ +export default { + "v1-sdk-improvements": "SDK Improvements", + "migration-guide-engine": "Engine Migration Guide", + "migration-guide-python": "Python Migration Guide", + "migration-guide-typescript": "Typescript Migration Guide", + "migration-guide-go": "Go Migration Guide", +}; diff --git a/frontend/docs/pages/home/migration-guide-engine.mdx b/frontend/docs/pages/v1/migrating/migration-guide-engine.mdx similarity index 90% rename from frontend/docs/pages/home/migration-guide-engine.mdx rename to frontend/docs/pages/v1/migrating/migration-guide-engine.mdx index e568395a84..c2d4c5b767 100644 --- a/frontend/docs/pages/home/migration-guide-engine.mdx +++ b/frontend/docs/pages/v1/migrating/migration-guide-engine.mdx @@ -63,7 +63,7 @@ For instructions on upgrading to the latest SDKs, please refer to the following While we'd prefer to avoid any breaking changes, v1 is architecturally very different from v0, which means that the following APIs will be 
modified/replaced: - While we haven't published an official REST API doc, we have often recommended usage of the REST API in our SDKs to implement replays, retrieving task status, and dead-letter queueing. The current API for listing, cancelling and replaying workflow runs will not work against a v1 engine. We will be providing an upgrade path using new endpoints which are more conducive to bulk replays and cancellations. -- We will only be supporting [CEL-based concurrency keys](./concurrency.mdx), and we will not be supporting custom concurrency methods defined on the client. If you require custom logic to compute the concurrency key that can't be captured in a CEL expression, we recommend computing the key ahead of time and passing it as part of the input to the workflow. **Workflows registered against a v1 engine with a custom concurrency method (instead of an expression) will not use a concurrency queue.** +- We will only be supporting [CEL-based concurrency keys](/v1/concurrency), and we will not be supporting custom concurrency methods defined on the client. If you require custom logic to compute the concurrency key that can't be captured in a CEL expression, we recommend computing the key ahead of time and passing it as part of the input to the workflow. **Workflows registered against a v1 engine with a custom concurrency method (instead of an expression) will not use a concurrency queue.** - Concurrency queues previously did not respect the `ScheduleTimeout` value set on the workflow level, so concurrency queues had no timeouts. In v1, concurrency queues will respect the schedule timeout value as well. 
_These are the most important breaking changes, but we will add any small modifications to queueing/workflow behavior ahead of March 24th._ diff --git a/frontend/docs/pages/home/migration-guide-go.mdx b/frontend/docs/pages/v1/migrating/migration-guide-go.mdx similarity index 100% rename from frontend/docs/pages/home/migration-guide-go.mdx rename to frontend/docs/pages/v1/migrating/migration-guide-go.mdx diff --git a/frontend/docs/pages/home/migration-guide-python.mdx b/frontend/docs/pages/v1/migrating/migration-guide-python.mdx similarity index 93% rename from frontend/docs/pages/home/migration-guide-python.mdx rename to frontend/docs/pages/v1/migrating/migration-guide-python.mdx index a095f69c95..72d7a2fe9e 100644 --- a/frontend/docs/pages/home/migration-guide-python.mdx +++ b/frontend/docs/pages/v1/migrating/migration-guide-python.mdx @@ -32,7 +32,7 @@ In this example, we use a few more new SDK features: -See our [Pydantic documentation](./pydantic.mdx) for more. +See our [Pydantic documentation](/reference/python/pydantic) for more. #### Other Breaking Changes @@ -50,13 +50,13 @@ Typing improvements: Naming changes: -1. We no longer have nested `aio` clients for async methods. Instead, async methods throughout the entire SDK are prefixed by `aio_`, similar to [Langchain's use of the `a` prefix](https://python.langchain.com/docs/concepts/streaming/#stream-and-astream) to indicate async. For example, to run a task, you may now either use `workflow.run()` or `workflow.aio_run()`. +1. We no longer have nested `aio` clients for async methods. Instead, async methods throughout the entire SDK are prefixed by `aio_`, similar to [Langchain's use of the `a` prefix](https://python.langchain.com/docs/advanced-tasks/streaming/#stream-and-astream) to indicate async. For example, to run a task, you may now either use `workflow.run()` or `workflow.aio_run()`. 2. All functions on Hatchet clients are now _verbs_. 
For instance the way to list workflow runs is via `hatchet.runs.list()`.
 3. `max_runs` on the worker has been renamed to `slots`.
 
 Removals:
 
-1. `sync_to_async` has been removed. We recommend reading [our asyncio documentation](./asyncio.mdx) for our recommendations on handling blocking work in otherwise async tasks.
+1. `sync_to_async` has been removed. We recommend reading [our asyncio documentation](/reference/python/asyncio) for our recommendations on handling blocking work in otherwise async tasks.
 
 Other miscellaneous changes:
diff --git a/frontend/docs/pages/home/migration-guide-typescript.mdx b/frontend/docs/pages/v1/migrating/migration-guide-typescript.mdx
similarity index 100%
rename from frontend/docs/pages/home/migration-guide-typescript.mdx
rename to frontend/docs/pages/v1/migrating/migration-guide-typescript.mdx
diff --git a/frontend/docs/pages/home/v1-sdk-improvements.mdx b/frontend/docs/pages/v1/migrating/v1-sdk-improvements.mdx
similarity index 96%
rename from frontend/docs/pages/home/v1-sdk-improvements.mdx
rename to frontend/docs/pages/v1/migrating/v1-sdk-improvements.mdx
index 1420aee82a..fd2ef01b65 100644
--- a/frontend/docs/pages/home/v1-sdk-improvements.mdx
+++ b/frontend/docs/pages/v1/migrating/v1-sdk-improvements.mdx
@@ -18,7 +18,7 @@ The Hatchet SDKs have seen considerable improvements with the V1 release.
 
 ### Highlights
 
-The Python SDK has a number of notable highlights to showcase for V1. Many of them have been highlighted elsewhere, such as [in the migration guide](./migration-guide-python.mdx), on the [Pydantic page](./pydantic.mdx), an in various examples. Here, we'll list out each of them, along with their motivations and benefits.
+The Python SDK has a number of notable highlights to showcase for V1. Many of them have been highlighted elsewhere, such as [in the migration guide](./migration-guide-python.mdx), on the [Pydantic page](/reference/python/pydantic), and in various examples. 
Here, we'll list out each of them, along with their motivations and benefits. First and foremost: Many of the changes in the V1 Python SDK are motivated by improved support for type checking and validation across large codebases and in production use-cases. With that in mind, the main highlights in the V1 Python SDK are: @@ -41,13 +41,13 @@ Typing improvements: Naming changes: -1. We no longer have nested `aio` clients for async methods. Instead, async methods throughout the entire SDK are prefixed by `aio_`, similar to [Langchain's use of the `a` prefix](https://python.langchain.com/docs/concepts/streaming/#stream-and-astream) to indicate async. For example, to run a workflow, you may now either use `workflow.run()` or `workflow.aio_run()`. +1. We no longer have nested `aio` clients for async methods. Instead, async methods throughout the entire SDK are prefixed by `aio_`, similar to [Langchain's use of the `a` prefix](https://python.langchain.com/docs/advanced-tasks/streaming/#stream-and-astream) to indicate async. For example, to run a workflow, you may now either use `workflow.run()` or `workflow.aio_run()`. 2. All functions on Hatchet clients are now _verbs_. For instance, if something was named `hatchet.nounVerb` before, it now will be something more like `hatchet.verb_noun`. For example, `hatchet.runs.get_result` gets the result of a workflow run. 3. `timeout`, the execution timeout of a task, has been renamed to `execution_timeout` for clarity. Removals: -1. `sync_to_async` has been removed. We recommend reading [our asyncio documentation](./asyncio.mdx) for our recommendations on handling blocking work in otherwise async tasks. +1. `sync_to_async` has been removed. We recommend reading [our asyncio documentation](/reference/python/asyncio) for our recommendations on handling blocking work in otherwise async tasks. 2. The `AdminClient` has been removed, and refactored into individual clients. 
For example, if you absolutely need to create a workflow run manually without using `Workflow.run` or `Standalone.run`, you can use `hatchet.runs.create`. This replaces the old `hatchet.admin.run_workflow`. Other miscellaneous changes: diff --git a/frontend/docs/pages/v1/mixing-patterns.mdx b/frontend/docs/pages/v1/mixing-patterns.mdx new file mode 100644 index 0000000000..4eb26a488e --- /dev/null +++ b/frontend/docs/pages/v1/mixing-patterns.mdx @@ -0,0 +1,90 @@ +import { Callout } from "nextra/components"; + +# Best Practices + +## Choosing a Pattern + +Use a **DAG** for any portion of work whose shape you know upfront, and use a **durable task** to orchestrate the parts whose shape is dynamic. You can mix them freely within the same application and even within the same workflow. + +| Scenario | Pattern | +| ---------------------------------------------- | -------------------------------------------- | +| Fixed pipeline, every step is known | DAG | +| Fixed pipeline, but one step needs a long wait | DAG with a durable task node | +| Dynamic orchestration of known pipelines | Durable task spawning DAGs | +| Fully dynamic, shape decided at runtime | Durable task spawning tasks/durable tasks | +| Agent that reasons and acts in a loop | Durable task spawning children per iteration | + +[DAGs](/v1/patterns/directed-acyclic-graphs) are inherently deterministic, since their shape is predefined and intermediate results are cached. If your workflow can be represented as a DAG, prefer that. Reach for a durable task only when you need capabilities a static graph can't express. + + + You don't have to choose one pattern for your entire application. Different + workflows can use different patterns, and a single workflow can mix them. + Start with the simplest pattern that fits and add complexity only when needed. + + +## Mixing Patterns + +### A durable task inside a DAG + +A DAG workflow can include a durable task as one of its nodes. 
The durable task checkpoints and waits like any other, while the rest of the DAG proceeds according to its declared dependencies. + +This is useful when most of your pipeline is a fixed graph but one step needs dynamic behavior, for example a pipeline where one stage runs an agentic loop that decides what to do at runtime. + +```mermaid +graph LR + A[Prepare Data] --> B[Durable: Agentic Loop] + B --> C[Publish Results] + style B stroke:#3392FF,stroke-dasharray: 5 5 +``` + +The durable task (`Agentic Loop`) can spawn children, sleep, wait for events, or loop until a condition is met. When it completes, the downstream `Publish Results` task runs automatically. + +### Spawning a DAG from a durable task + +A durable task can spawn an entire DAG workflow as a child, wait for its result, and then continue. This lets you use procedural control flow to decide _which_ pipeline to run and _how many times_ to run it, while the pipeline itself is a well-defined graph. + +```mermaid +graph TD + DT[Durable Task] -->|spawns| DAG1[DAG: Process Batch 1] + DT -->|spawns| DAG2[DAG: Process Batch 2] + DT -->|spawns| DAG3[DAG: Process Batch N] + DAG1 -->|result| DT + DAG2 -->|result| DT + DAG3 -->|result| DT + style DT stroke:#3392FF + style DAG1 stroke:#22C55E + style DAG2 stroke:#22C55E + style DAG3 stroke:#22C55E +``` + +The durable task decides at runtime how many batches to process, spawns a DAG workflow for each one, and collects the results. The DAG workflows run in parallel across your worker fleet while the durable task's slot is freed. + +### Durable tasks spawning durable tasks + +A durable task can spawn other durable tasks as children, each with their own checkpoints and event waits. This creates a tree of durable work that's entirely driven by runtime logic. 
+ +```mermaid +graph TD + Root[Durable: Orchestrator] -->|spawns| A[Durable: Agent A] + Root -->|spawns| B[Durable: Agent B] + A -->|spawns| A1[Task: Subtask] + A -->|spawns| A2[Task: Subtask] + B -->|spawns| B1[Durable: Sub-Agent] + B1 -->|spawns| B1a[Task: Subtask] + style Root stroke:#3392FF,stroke-dasharray: 5 5 + style A stroke:#3392FF,stroke-dasharray: 5 5 + style B stroke:#3392FF,stroke-dasharray: 5 5 + style B1 stroke:#3392FF,stroke-dasharray: 5 5 +``` + +This pattern is ideal for agent-based systems where each level of the tree decides what to do next. Each durable task in the tree can sleep, wait for events, or spawn more children, and none of them hold a worker slot while waiting. + +## Determinism in Durable Tasks + +Durable tasks must be **deterministic** between checkpoints. The task should always perform the same sequence of operations in between retries. This is what allows Hatchet to replay the task from the last checkpoint. If a task is not deterministic, it may produce different results on each retry, which can lead to unexpected behavior. + +### Rules for determinism + +1. **Only call methods available on the `DurableContext`**: a common way to introduce non-determinism is to call methods that produce side effects. If you need to fetch data from a database, call an API, or otherwise interact with external systems, spawn those operations as a **child task** using `RunChild`. Durable tasks are [evicted](/v1/durable-workflows/task-eviction) at every wait point and replayed from checkpoint on resume. Any side effect not behind a checkpoint will re-execute. + +2. **When updating durable tasks, always guarantee backwards compatibility**: if you change the order of checkpoint operations in a durable task, you may break determinism. For example, if you call `SleepFor` followed by `WaitFor`, and then change the order of those calls, Hatchet will not be able to replay the task correctly. 
The task may have already been checkpointed at the first call to `SleepFor`, and changing the order makes that checkpoint meaningless. diff --git a/frontend/docs/pages/v1/observability/_meta.js b/frontend/docs/pages/v1/observability/_meta.js new file mode 100644 index 0000000000..3f13af331e --- /dev/null +++ b/frontend/docs/pages/v1/observability/_meta.js @@ -0,0 +1,8 @@ +export default { + index: { display: "hidden" }, + "worker-healthchecks": { display: "hidden" }, + logging: { display: "hidden" }, + opentelemetry: { display: "hidden" }, + "prometheus-metrics": { display: "hidden" }, + "additional-metadata": { display: "hidden" }, +}; diff --git a/frontend/docs/pages/v1/observability/additional-metadata.mdx b/frontend/docs/pages/v1/observability/additional-metadata.mdx new file mode 100644 index 0000000000..66354188fc --- /dev/null +++ b/frontend/docs/pages/v1/observability/additional-metadata.mdx @@ -0,0 +1,84 @@ +import { Callout, Card, Cards, Steps, Tabs } from "nextra/components"; +import UniversalTabs from "@/components/UniversalTabs"; +import { Snippet } from "@/components/code"; +import { snippets } from "@/lib/generated/snippets"; + +# Additional Metadata + +Hatchet allows you to attach arbitrary key-value string pairs to events and task runs, which can be used for filtering, searching, or any other lookup purposes. This additional metadata is not part of the event payload or task input data but provides supplementary information for better organization and discoverability. + + + Additional metadata can be added to `Runs`, `Scheduled Runs`, `Cron Runs`, and + `Events`. The data is propagated from parents to children or from events to + runs. 
+ + +You can attach additional metadata when pushing events or triggering task runs using the Hatchet client libraries: + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +## Filtering in the Dashboard + +Once you've attached additional metadata to events or task runs, this data will be available in the Event and Task Run list views in the Hatchet dashboard. You can use the filter input field to search for events or task runs based on the additional metadata key-value pairs you've attached. + +For example, you can filter events by the `source` metadata keys to quickly find events originating from a specific source or environment. + +![Blocks](/addl-meta.gif) + +## Use Cases + +Some common use cases for additional metadata include: + +- Tagging events or task runs with environment information (e.g., `production`, `staging`, `development`) +- Specifying the source or origin of events (e.g., `api`, `webhook`, `manual`) +- Categorizing events or task runs based on business-specific criteria (e.g., `priority`, `region`, `product`) + +By leveraging additional metadata, you can enhance the organization, searchability, and discoverability of your events and task runs within Hatchet. diff --git a/frontend/docs/pages/v1/observability/index.mdx b/frontend/docs/pages/v1/observability/index.mdx new file mode 100644 index 0000000000..40750a0065 --- /dev/null +++ b/frontend/docs/pages/v1/observability/index.mdx @@ -0,0 +1,14 @@ +--- +title: Observability +description: Health checks, logging, metrics, and metadata for Hatchet workers and tasks. +--- + +# Observability + +Monitor and debug your Hatchet workers and tasks. 
+ +- [Worker Health Checks](/v1/worker-healthchecks) — Monitor worker liveness +- [Logging](/v1/logging) — Structured task logging +- [OpenTelemetry](/v1/opentelemetry) — Distributed tracing +- [Prometheus Metrics](/v1/prometheus-metrics) — Export metrics +- [Additional Metadata](/v1/additional-metadata) — Tag runs for filtering and debugging diff --git a/frontend/docs/pages/v1/observability/logging.mdx b/frontend/docs/pages/v1/observability/logging.mdx new file mode 100644 index 0000000000..d6bda29b0f --- /dev/null +++ b/frontend/docs/pages/v1/observability/logging.mdx @@ -0,0 +1,75 @@ +import { snippets } from "@/lib/generated/snippets"; +import { Snippet } from "@/components/code"; +import { FileTree } from "nextra/components"; +import { Tabs, Callout } from "nextra/components"; +import UniversalTabs from "@/components/UniversalTabs"; + +# Logging + +Hatchet comes with a built-in logging view where you can push logs from your workflows. This is useful for debugging and monitoring your workflows. + + +{/* TODO V1 DOCS -- Add Go logging and add this tab back */} + +You can use either Python's built-in `logging` package, or the `context.log` method for more control over the logs that are sent. + +## Using the built-in `logging` package + +You can pass a custom logger to the `Hatchet` class when initializing it. For example: + + + +It's recommended that you pass the root logger to the `Hatchet` class, as this will ensure that all logs are captured by the Hatchet logger. If you have workflows defined in multiple files, they should be children of the root logger. For example, with the following file structure: + + + + + + + + + +You should pass the root logger to the `Hatchet` class in `client.py`: + + + +And then in `workflows/workflow.py`, you should create a child logger: + + + +## Using the `context.log` method + +You can also use the `context.log` method to log messages from your workflows. 
This method is available on the `Context` object that is passed to each task in your workflow. For example: + + + +Each task is currently limited to 1000 log lines. + + + + + +In TypeScript, there are two options for logging from your tasks. The first is to use the `ctx.log()` method (from the `Context`) to send logs: + + + +This has the benefit of being easy to use out of the box (no setup required!), but it's limited in its flexibiliy and how pluggable it is with your existing logging setup. + +Hatchet also allows you to "bring your own" logger when you define a workflow: + + + +In this example, we create Pino logger that implement's Hatchet's `Logger` interface and pass it to the Hatchet client constructor. We can then use that logger in our steps: + + + + +{/* +TODO V1 DOCS - ADD GO LOGGING HERE + */} + + + + + + diff --git a/frontend/docs/pages/home/opentelemetry.mdx b/frontend/docs/pages/v1/observability/opentelemetry.mdx similarity index 100% rename from frontend/docs/pages/home/opentelemetry.mdx rename to frontend/docs/pages/v1/observability/opentelemetry.mdx diff --git a/frontend/docs/pages/home/prometheus-metrics.mdx b/frontend/docs/pages/v1/observability/prometheus-metrics.mdx similarity index 88% rename from frontend/docs/pages/home/prometheus-metrics.mdx rename to frontend/docs/pages/v1/observability/prometheus-metrics.mdx index 3c4c253385..7c848c1e79 100644 --- a/frontend/docs/pages/home/prometheus-metrics.mdx +++ b/frontend/docs/pages/v1/observability/prometheus-metrics.mdx @@ -3,7 +3,8 @@ import { Callout } from "nextra/components"; # Prometheus Metrics - Only available in the Growth tier and above on Hatchet Cloud + Only available in the Dedicated tier and above on Hatchet Cloud, [reach + out](https://hatchet.run/office-hours) to upgrade. Hatchet exports Prometheus Metrics for your tenant which can be scraped with services like Grafana and DataDog. 
diff --git a/frontend/docs/pages/home/worker-healthchecks.mdx b/frontend/docs/pages/v1/observability/worker-healthchecks.mdx similarity index 100% rename from frontend/docs/pages/home/worker-healthchecks.mdx rename to frontend/docs/pages/v1/observability/worker-healthchecks.mdx diff --git a/frontend/docs/pages/v1/on-failure.mdx b/frontend/docs/pages/v1/on-failure.mdx new file mode 100644 index 0000000000..128dbcac09 --- /dev/null +++ b/frontend/docs/pages/v1/on-failure.mdx @@ -0,0 +1,90 @@ +import { snippets } from "@/lib/generated/snippets"; +import { Snippet } from "@/components/code"; +import { Callout, Tabs } from "nextra/components"; +import UniversalTabs from "@/components/UniversalTabs"; + +# Error Handling + +When a task fails, you need a way to run cleanup logic, send notifications, or trigger compensating actions. Both durable tasks and DAGs support error handling, but the mechanism differs: durable tasks use standard try/catch blocks, while DAGs declare a special on-failure task. + + + + +## Try/Catch in Durable Tasks + +Durable tasks are regular functions, so you handle errors with your language's native error handling (`try`/`except` in Python, `try`/`catch` in TypeScript/Go). This gives you full control over what happens when a child task or operation fails. + +### Handling child task errors + +When spawning child tasks, wrap the call in a try/catch block to handle failures gracefully: + + + + + + + + + + + + + + + + + + + + + + +### Common patterns + +- **Retry with backoff** — Catch the error, sleep, and retry the child task. +- **Fallback logic** — If a primary path fails, spawn a different child task as a fallback. +- **Partial failure handling** — In a fan-out, collect results from successful children and handle failures individually rather than failing the entire workflow. +- **Cleanup** — Release resources, cancel in-progress work, or notify external systems. 
+ + + + +## On-Failure Tasks + +The on-failure task is a special task that runs when any task in the workflow fails. It lets you handle errors, perform cleanup, or trigger notifications declaratively as part of the workflow definition. + +### Defining an on-failure task + +You can define an on-failure task on your workflow the same as you'd define any other task: + + + + + + Note: Only one on-failure task can be defined per workflow. + + + + + + + + + + + + + +The on-failure task will be executed only if any of the main tasks in the workflow fail. + +### Use cases + +- Performing cleanup tasks after a task failure in a workflow +- Sending notifications or alerts about the failure +- Logging additional information for debugging purposes +- Triggering a compensating action or a fallback task + + + diff --git a/frontend/docs/pages/v1/opentelemetry.mdx b/frontend/docs/pages/v1/opentelemetry.mdx new file mode 100644 index 0000000000..314a22d938 --- /dev/null +++ b/frontend/docs/pages/v1/opentelemetry.mdx @@ -0,0 +1,104 @@ +import { Callout } from "nextra/components"; +import { snippets } from "@/lib/generated/snippets"; +import { Snippet } from "@/components/code"; + +# OpenTelemetry + + + OpenTelemetry support is currently only available for the Python SDK. + + +Hatchet supports exporting traces from your tasks to an [OpenTelemetry Collector](https://opentelemetry.io/docs/collector/) to improve visibility into your Hatchet tasks. + +## Usage + +### Setup + +Hatchet's SDK provides an instrumentor that auto-instruments Hatchet code if you opt in. Setup is straightforward: + +First, install the `otel` extra with (e.g.) `pip install hatchet-sdk[otel]`. Then, import the instrumentor: + + + +You bring your own trace provider and plug it into the `HatchetInstrumentor`, call `instrument`, and that's it! + + + Check out the [OpenTelemetry + documentation](https://opentelemetry.io/docs/languages/python/instrumentation/) + for more information on how to set up a trace provider. 
+ + +### Spans + +By default, Hatchet creates spans at the following points in the lifecycle of a task run: + +1. When a trigger is run on the client side, e.g. `run()` or `push()` is called. +2. When a worker handles a task event, such as starting to run the task or cancelling the task + +In addition, you'll get a handful of attributes set (prefixed by `hatchet.`) on the task run events, such as the task name and the worker ID, as well as success/failure states, and so on. + +Some other important notes: + +1. The instrumentor will automatically propagate the trace context between task runs, so if you spawn a task from another task, the child will correctly show up as a child of its parent in the trace waterfall. +2. You can exclude specific attributes from being attached to spans by providing the `otel` configuration option on the `ClientConfig` and passing a list of `excluded_attributes`, which come from [this list](https://github.com/hatchet-dev/hatchet/blob/main/sdks/python/hatchet_sdk/utils/opentelemetry.py). + +## Integrations + +Hatchet's instrumentor is easy to integrate with a number of third-party tracing tools. + +### Langfuse + +For example, you might be interested in using [Langfuse](https://langfuse.com/) for tracing an LLM-intensive application. + + + Note that this example uses Langfuse's [V3 (OTel-based) + SDK](https://langfuse.com/docs/sdk/python/sdk-v3). See their docs for more + information. + + +First, configure the Langfuse client [as described by their documentation](https://langfuse.com/docs/opentelemetry/example-python-sdk): + + + +Langfuse will set the global tracer provider, so you don't have to do it manually. + +Next, create an OpenAI client [using Langfuse's OpenAI wrapper `langfuse.openai` as a drop-in replacement for the default OpenAI](https://langfuse.com/docs/integrations/openai/python/get-started) client: + + + +And that's it! Now you're ready to instrument your Hatchet workers with Langfuse. 
For example, create a task like this: + + + +And finally, run the task to view the Langfuse traces (cost, usage, etc.) interspersed with Hatchet's traces, in addition to any other traces you may have: + + + +When you run this task, you'll see a trace like this in Langfuse! + +example-langfuse-trace diff --git a/frontend/docs/pages/v1/patterns/_meta.js b/frontend/docs/pages/v1/patterns/_meta.js new file mode 100644 index 0000000000..d7102a7471 --- /dev/null +++ b/frontend/docs/pages/v1/patterns/_meta.js @@ -0,0 +1,5 @@ +export default { + "durable-task-execution": "Durable Execution", + "directed-acyclic-graphs": "DAGs", + "mixing-patterns": "Best Practices", +}; diff --git a/frontend/docs/pages/home/dags.mdx b/frontend/docs/pages/v1/patterns/directed-acyclic-graphs.mdx similarity index 72% rename from frontend/docs/pages/home/dags.mdx rename to frontend/docs/pages/v1/patterns/directed-acyclic-graphs.mdx index 9b6e1dea93..815cfe1ee5 100644 --- a/frontend/docs/pages/home/dags.mdx +++ b/frontend/docs/pages/v1/patterns/directed-acyclic-graphs.mdx @@ -2,16 +2,42 @@ import { snippets } from "@/lib/generated/snippets"; import { Snippet } from "@/components/code"; import { Callout, Card, Cards, Steps, Tabs } from "nextra/components"; import UniversalTabs from "@/components/UniversalTabs"; +import WorkflowDiagram from "@/components/WorkflowDiagram"; +import PipelineDiagram from "@/components/PipelineDiagram"; # Declarative Workflow Design (DAGs) -Hatchet workflows are designed in a **Directed Acyclic Graph (DAG)** format, where each task is a node in the graph, and the dependencies between tasks are the edges. This structure ensures that workflows are organized, predictable, and free from circular dependencies. By defining the sequence and dependencies of tasks upfront, you can easily understand the actual runtime state as compared to the expected state when debugging or troubleshooting. 
+Hatchet workflows are designed in a **Directed Acyclic Graph (DAG)** format, where each task is a node in the graph, and the dependencies between tasks are the edges. This structure ensures that workflows are organized, predictable, and free from circular dependencies. + + + +## How DAG Workflows Work + + + +### You declare the graph + +Define tasks and their dependencies upfront. Hatchet knows the full shape of work before execution begins. + +### Hatchet executes in order + +Tasks run as soon as their parents complete. Independent tasks run in parallel automatically. A worker slot is only assigned when a task is ready to execute, so tasks waiting on parents consume no resources. Each task has configurable [retry policies](/v1/retry-policies) and [timeouts](/v1/timeouts). + +### Results flow downstream + +Task outputs are cached and passed to child tasks. If a failure occurs mid-workflow, completed tasks don't re-run. + +### Everything is observable + +Every task execution is tracked in the dashboard: inputs, outputs, durations, and errors. You can see exactly where a workflow succeeded or failed. + + ## Defining a Workflow Start by declaring a workflow with a name. The workflow object can declare additional workflow-level configuration options which we'll cover later. -The returned object is an instance of the `Workflow` class, which is the primary interface for interacting with the workflow (i.e. [running](./run-with-results.mdx), [enqueuing](./run-no-wait.mdx), [scheduling](./scheduled-runs.mdx), etc). +The returned object is an instance of the `Workflow` class, which is the primary interface for interacting with the workflow (i.e. [running](/v1/running-your-task#run-and-wait), [enqueuing](/v1/running-your-task#fire-and-forget), [scheduling](/v1/scheduled-runs), etc).
@@ -36,8 +62,8 @@ The returned object is an instance of the `Workflow` class, which is the primary The Workflow return object can be interacted with in the same way as a - [task](./your-first-task.mdx), however, it can only take a subset of options - which are applied at the task level. + [task](/v1/tasks), however, it can only take a subset of options which are + applied at the task level. ## Defining a Task @@ -46,7 +72,7 @@ Now that we have a workflow, we can define a task to be executed as part of the The `task` method takes a name and a function that defines the task's behavior. The function will receive the workflow's input and return the task's output. Tasks also accept a number of other configuration options, which are covered elsewhere in our documentation. - + In Python, the `task` method is a decorator, which is used like this to wrap a function: @@ -86,7 +112,7 @@ asynchronous. The power of Hatchet's workflow design comes from connecting tasks into a DAG structure. Tasks can specify dependencies (parents) which must complete successfully before the task can start. - + @@ -111,7 +137,7 @@ The power of Hatchet's workflow design comes from connecting tasks into a DAG st As shown in the examples above, tasks can access outputs from their parent tasks using the context object: - + @@ -141,7 +167,7 @@ if err != nil { You can run workflows directly or enqueue them for asynchronous execution. All the same methods for running a task are available for workflows! - + @@ -169,3 +195,9 @@ runID, err := simple.RunNoWait(ctx, input) + +## Pre-Determined Pipelines + +DAGs naturally model fixed multi-stage pipelines where the sequence of tasks and their dependencies are known before execution. ETL workflows, [document processing](/guides/document-processing) pipelines, and CI/CD workflows all follow this pattern: each stage depends on the previous, and the overall structure is visible and predictable in the dashboard. 
+ + diff --git a/frontend/docs/pages/v1/patterns/durable-task-execution.mdx b/frontend/docs/pages/v1/patterns/durable-task-execution.mdx new file mode 100644 index 0000000000..5c574e828b --- /dev/null +++ b/frontend/docs/pages/v1/patterns/durable-task-execution.mdx @@ -0,0 +1,123 @@ +import { snippets } from "@/lib/generated/snippets"; +import { Snippet } from "@/components/code"; + +import { Callout, Steps } from "nextra/components"; +import DurableWorkflowDiagram from "@/components/DurableWorkflowDiagramWrapper"; + +# Durable Tasks + +Use durable tasks when **you don't know the shape of work ahead of time**. For example, an AI agent that picks its next action based on a model response, a fan-out where N is determined by the input data, or a pipeline that branches and spawns sub-workflows based on intermediate results. In all of these cases, the "graph" of work doesn't exist when the task starts; it emerges at runtime as the task makes decisions and [spawns children](/v1/durable-workflows/child-spawning). + +A durable task is a single long-running function that acts as an **orchestrator**: it spawns child tasks, waits for their results, makes decisions, and spawns more. Hatchet checkpoints its progress so it can recover from crashes, survive long waits, and resume on any worker without re-executing completed work. + + + If you know the full graph of work upfront (every task and dependency is fixed + before execution begins), use a [DAG](/v1/patterns/directed-acyclic-graphs) + instead. You can always [mix both patterns](/v1/patterns/mixing-patterns) in + the same application. + + +## When to Use Durable Tasks + +| Scenario | Why Durable? 
| +| --------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| **Dynamic fan-out** (N unknown) | Spawn children based on runtime data; wait for results without holding a slot. See [Batch Processing](/guides/batch-processing) and [Document Processing](/guides/document-processing). | +| **Agentic workflows** | An agent decides what to do next, spawns subtasks, loops, or stops at runtime. See [AI Agents](/guides/ai-agents/reasoning-loop). | +| **Long waits** (hours/days) | Worker slots are freed during waits; no wasted compute. | +| **Human-in-the-loop** | Wait for approval events without holding resources. See [Human-in-the-Loop](/guides/human-in-the-loop). | +| **Multi-step with inline pauses** | `SleepFor` and `WaitForEvent` let you express complex procedural flows. | +| **Crash-resilient pipelines** | Automatically resume from checkpoints after failures. | + +## How It Works + +A durable task builds the workflow at runtime through **child spawning**. The task function runs, inspects data, and decides what to do next by spawning child tasks. The parent is [evicted](/v1/durable-workflows/task-eviction) while children execute, freeing its worker slot. When children complete, the parent resumes from its checkpoint and continues. + +```mermaid +sequenceDiagram + participant P as Durable Task + participant H as Hatchet + participant W as Workers + + P->>H: Spawn Child A, Child B, Child C...N + H-->>P: Evicted (slot freed) + H->>W: Schedule children across fleet + W->>H: Child results + H->>P: Resume from checkpoint + P->>P: Inspect results, decide next step + P->>H: Spawn more children, sleep, or finish +``` + +This is fundamentally different from a DAG, where every task and dependency is declared before execution begins. 
With durable tasks, the number of children, which branches to take, and whether to loop or stop are all determined by your code at runtime. + + + + + +### Checkpoints + +Each call to `SleepFor`, `WaitForEvent`, `WaitFor`, `Memo`, or `RunChild` creates a checkpoint in the durable event log. These checkpoints record the task's progress. + +### Worker slot is freed during waits + +When a durable task enters a wait (sleep, event, or child result), Hatchet [evicts](/v1/durable-workflows/task-eviction) it from the worker. The slot is immediately available for other tasks. + +### Task resumes from checkpoint + +When the wait completes, Hatchet re-queues the task on any available worker. It replays the event log up to the last checkpoint and resumes execution from there. Completed operations are not re-executed. + + + +## The Durable Context + +Declare a task as durable (using `durable_task` instead of `task`) and it receives a `DurableContext` instead of a normal `Context`. The `DurableContext` extends `Context` with methods for checkpointing and waiting: + +| Method | Purpose | +| ----------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | +| **`SleepFor(duration)`** | Pause for a fixed duration. Respects the original sleep time on restart; if interrupted after 23 of 24 hours, only sleeps 1 more hour. | +| **`WaitForEvent(key, expr)`** | Wait for an external event by key, with optional [CEL filter](https://github.com/google/cel-spec) expression on the payload. | +| **`WaitFor(conditions)`** | General-purpose wait accepting any combination of sleep conditions, event conditions, or or-groups. `SleepFor` and `WaitForEvent` are convenience wrappers around this method. | +| **`Memo(function)`** | Run functions whose outputs are memoized based on the input arguments. 
| +| **`RunChild(task, input)`** | Spawn a child task and wait for its result. The parent is evicted during the wait. | + +## Example Task + + + +Now add tasks to the workflow. The first is a regular task; the second is a durable task that sleeps and waits for an event: + + + + + The `durable_task` decorator gives the function a `DurableContext` instead of + a regular `Context`. This is the only difference in declaration; the task + registers and runs on the same worker as regular tasks. + + +If this task is interrupted at any time, it will continue from where it left off. If the task calls `ctx.aio_sleep_for` for 24 hours and is interrupted after 23 hours, it will only sleep for 1 more hour on restart. + +### Or Groups + +Durable tasks can combine multiple wait conditions using [or groups](/v1/durable-workflows/conditions#or-groups). For example, you could wait for either an event or a sleep (whichever comes first): + + + +## Spawning Child Tasks + +Child spawning is the primary way durable tasks build workflows at runtime. A durable task can spawn any runnable (regular tasks, other durable tasks, or entire DAG workflows), wait for results, and decide what to do next. + +| Child type | Example | +| ---------------- | --------------------------------------------------------------------------------- | +| **Regular task** | Spawn a stateless task for a quick computation or API call. | +| **Durable task** | Spawn another durable task that has its own checkpoints, sleeps, and event waits. | +| **DAG workflow** | Spawn an entire multi-task workflow and wait for its final output. | + +The parent is evicted while children execute, so it consumes no resources. The number and type of children can be determined dynamically based on input, intermediate results, or model outputs. + +See [Child Spawning](/v1/durable-workflows/child-spawning) for patterns and full examples. 
+ + + For an in-depth look at how durable execution works internally, see [this blog + post](https://hatchet.run/blog/durable-execution). + diff --git a/frontend/docs/pages/v1/patterns/mixing-patterns.mdx b/frontend/docs/pages/v1/patterns/mixing-patterns.mdx new file mode 100644 index 0000000000..4eb26a488e --- /dev/null +++ b/frontend/docs/pages/v1/patterns/mixing-patterns.mdx @@ -0,0 +1,90 @@ +import { Callout } from "nextra/components"; + +# Best Practices + +## Choosing a Pattern + +Use a **DAG** for any portion of work whose shape you know upfront, and use a **durable task** to orchestrate the parts whose shape is dynamic. You can mix them freely within the same application and even within the same workflow. + +| Scenario | Pattern | +| ---------------------------------------------- | -------------------------------------------- | +| Fixed pipeline, every step is known | DAG | +| Fixed pipeline, but one step needs a long wait | DAG with a durable task node | +| Dynamic orchestration of known pipelines | Durable task spawning DAGs | +| Fully dynamic, shape decided at runtime | Durable task spawning tasks/durable tasks | +| Agent that reasons and acts in a loop | Durable task spawning children per iteration | + +[DAGs](/v1/patterns/directed-acyclic-graphs) are inherently deterministic, since their shape is predefined and intermediate results are cached. If your workflow can be represented as a DAG, prefer that. Reach for a durable task only when you need capabilities a static graph can't express. + + + You don't have to choose one pattern for your entire application. Different + workflows can use different patterns, and a single workflow can mix them. + Start with the simplest pattern that fits and add complexity only when needed. + + +## Mixing Patterns + +### A durable task inside a DAG + +A DAG workflow can include a durable task as one of its nodes. 
The durable task checkpoints and waits like any other, while the rest of the DAG proceeds according to its declared dependencies. + +This is useful when most of your pipeline is a fixed graph but one step needs dynamic behavior, for example a pipeline where one stage runs an agentic loop that decides what to do at runtime. + +```mermaid +graph LR + A[Prepare Data] --> B[Durable: Agentic Loop] + B --> C[Publish Results] + style B stroke:#3392FF,stroke-dasharray: 5 5 +``` + +The durable task (`Agentic Loop`) can spawn children, sleep, wait for events, or loop until a condition is met. When it completes, the downstream `Publish Results` task runs automatically. + +### Spawning a DAG from a durable task + +A durable task can spawn an entire DAG workflow as a child, wait for its result, and then continue. This lets you use procedural control flow to decide _which_ pipeline to run and _how many times_ to run it, while the pipeline itself is a well-defined graph. + +```mermaid +graph TD + DT[Durable Task] -->|spawns| DAG1[DAG: Process Batch 1] + DT -->|spawns| DAG2[DAG: Process Batch 2] + DT -->|spawns| DAG3[DAG: Process Batch N] + DAG1 -->|result| DT + DAG2 -->|result| DT + DAG3 -->|result| DT + style DT stroke:#3392FF + style DAG1 stroke:#22C55E + style DAG2 stroke:#22C55E + style DAG3 stroke:#22C55E +``` + +The durable task decides at runtime how many batches to process, spawns a DAG workflow for each one, and collects the results. The DAG workflows run in parallel across your worker fleet while the durable task's slot is freed. + +### Durable tasks spawning durable tasks + +A durable task can spawn other durable tasks as children, each with their own checkpoints and event waits. This creates a tree of durable work that's entirely driven by runtime logic. 
+ +```mermaid +graph TD + Root[Durable: Orchestrator] -->|spawns| A[Durable: Agent A] + Root -->|spawns| B[Durable: Agent B] + A -->|spawns| A1[Task: Subtask] + A -->|spawns| A2[Task: Subtask] + B -->|spawns| B1[Durable: Sub-Agent] + B1 -->|spawns| B1a[Task: Subtask] + style Root stroke:#3392FF,stroke-dasharray: 5 5 + style A stroke:#3392FF,stroke-dasharray: 5 5 + style B stroke:#3392FF,stroke-dasharray: 5 5 + style B1 stroke:#3392FF,stroke-dasharray: 5 5 +``` + +This pattern is ideal for agent-based systems where each level of the tree decides what to do next. Each durable task in the tree can sleep, wait for events, or spawn more children, and none of them hold a worker slot while waiting. + +## Determinism in Durable Tasks + +Durable tasks must be **deterministic** between checkpoints. The task should always perform the same sequence of operations in between retries. This is what allows Hatchet to replay the task from the last checkpoint. If a task is not deterministic, it may produce different results on each retry, which can lead to unexpected behavior. + +### Rules for determinism + +1. **Only call methods available on the `DurableContext`**: a common way to introduce non-determinism is to call methods that produce side effects. If you need to fetch data from a database, call an API, or otherwise interact with external systems, spawn those operations as a **child task** using `RunChild`. Durable tasks are [evicted](/v1/durable-workflows/task-eviction) at every wait point and replayed from checkpoint on resume. Any side effect not behind a checkpoint will re-execute. + +2. **When updating durable tasks, always guarantee backwards compatibility**: if you change the order of checkpoint operations in a durable task, you may break determinism. For example, if you call `SleepFor` followed by `WaitFor`, and then change the order of those calls, Hatchet will not be able to replay the task correctly. 
The task may have already been checkpointed at the first call to `SleepFor`, and changing the order makes that checkpoint meaningless. diff --git a/frontend/docs/pages/home/priority.mdx b/frontend/docs/pages/v1/priority.mdx similarity index 95% rename from frontend/docs/pages/home/priority.mdx rename to frontend/docs/pages/v1/priority.mdx index 9b67c7e318..8f44d418cb 100644 --- a/frontend/docs/pages/home/priority.mdx +++ b/frontend/docs/pages/v1/priority.mdx @@ -56,7 +56,7 @@ This will assign the same default priority to all runs of this workflow (and all When you trigger a run, you can set the priority of the triggered run to override its default priority. - + @@ -78,7 +78,7 @@ When you trigger a run, you can set the priority of the triggered run to overrid Similarly, you can also assign a priority to scheduled and cron workflows. - + diff --git a/frontend/docs/pages/v1/prometheus-metrics.mdx b/frontend/docs/pages/v1/prometheus-metrics.mdx new file mode 100644 index 0000000000..7c848c1e79 --- /dev/null +++ b/frontend/docs/pages/v1/prometheus-metrics.mdx @@ -0,0 +1,45 @@ +import { Callout } from "nextra/components"; + +# Prometheus Metrics + + + Only available in the Dedicated tier and above on Hatchet Cloud, [reach + out](https://hatchet.run/office-hours) to upgrade. + + +Hatchet exports Prometheus Metrics for your tenant which can be scraped with services like Grafana and DataDog. + +## Tenant Metrics + +Only works with v1 tenants + +Metrics for individual tenants are available in Prometheus Text Format via a REST API endpoint. + +### Endpoint + +``` +GET /api/v1/tenants/{tenantId}/prometheus-metrics +``` + +### Authentication + +The endpoint requires Bearer token authentication using a valid API token: + +``` +Authorization: Bearer +``` + +### Response Format + +The response is returned in standard Prometheus Text Format, including: + +- HELP comments describing each metric +- TYPE declarations (counter, gauge, etc.) 
+- Metric samples with labels and values + +### Example Usage + +```bash +curl -H "Authorization: Bearer your-api-token-here" \ + https://cloud.onhatchet.run/api/v1/tenants/707d0855-80ab-4e1f-a156-f1c4546cbf52/prometheus-metrics +``` diff --git a/frontend/docs/pages/home/hatchet-cloud-quickstart.mdx b/frontend/docs/pages/v1/quickstart.mdx similarity index 76% rename from frontend/docs/pages/home/hatchet-cloud-quickstart.mdx rename to frontend/docs/pages/v1/quickstart.mdx index 5fb8405913..17d4d1b44b 100644 --- a/frontend/docs/pages/home/hatchet-cloud-quickstart.mdx +++ b/frontend/docs/pages/v1/quickstart.mdx @@ -1,3 +1,7 @@ +--- +asIndexPage: true +--- + import { snippets } from "@/lib/generated/snippets"; import { Snippet } from "@/components/code"; import { Callout, Card, Cards, Steps, Tabs } from "nextra/components"; @@ -5,9 +9,13 @@ import UniversalTabs from "../../components/UniversalTabs"; # Hatchet Cloud Quickstart -Welcome to Hatchet! This guide walks you through getting set up on Hatchet Cloud. If you'd like to self-host Hatchet, please see the [self-hosted quickstart](../../self-hosting/) instead. +By the end of this guide you'll have a worker running locally that executes a simple task triggered from the CLI. -## Quickstart + + This guide walks you through getting set up on Hatchet Cloud. If you'd like to + self-host Hatchet, please see the [self-hosted quickstart](/self-hosting) + instead. + @@ -78,15 +86,21 @@ Finally, you can trigger your workflow using the `hatchet trigger simple` comman hatchet trigger simple ``` -### (Optional) Install Hatchet docs MCP +### (Optional) Install Hatchet docs MCP and Agent Skills -Get Hatchet documentation directly in your AI coding assistant (Cursor, Claude Code, Claude Desktop, and more): +Get Hatchet documentation directly in your AI coding assistant (Cursor, Claude Code, and more): ```sh copy hatchet docs install ``` -See the [full setup guide](./install-docs-mcp.mdx) for manual configuration options. 
+Get agent skills for common CLI operations: + +```sh copy +hatchet skills install +``` + +See the [full setup guide](/v1/using-coding-agents) for manual configuration options. @@ -94,4 +108,4 @@ And that's it! You should now have a Hatchet project set up on Hatchet Cloud wit ## Next Steps -Once you've completed the quickstart, you can explore a more in-depth walkthrough of Hatchet using the [walkthrough](/home/setup) guide. +Once you've completed the quickstart, continue to the next section to learn how to [create your first task](/v1/tasks). diff --git a/frontend/docs/pages/home/rate-limits.mdx b/frontend/docs/pages/v1/rate-limits.mdx similarity index 97% rename from frontend/docs/pages/home/rate-limits.mdx rename to frontend/docs/pages/v1/rate-limits.mdx index a850cb86da..6c93caf583 100644 --- a/frontend/docs/pages/home/rate-limits.mdx +++ b/frontend/docs/pages/v1/rate-limits.mdx @@ -87,7 +87,7 @@ If a step run exceeds the rate limit, Hatchet re-queues the step run until the r Define the static rate limits that can be consumed by any step run across all workflow runs using the `put_rate_limit` method in the `Admin` client within your code. - + @@ -115,7 +115,7 @@ Define the static rate limits that can be consumed by any step run across all wo With your rate limit key defined, specify the units of consumption for a specific key in each step definition by adding the `rate_limits` configuration to your step definition in your workflow. - + diff --git a/frontend/docs/pages/v1/region-availability.mdx b/frontend/docs/pages/v1/region-availability.mdx new file mode 100644 index 0000000000..4df12af954 --- /dev/null +++ b/frontend/docs/pages/v1/region-availability.mdx @@ -0,0 +1,20 @@ +# Region availability + +Hatchet Cloud is available in multiple regions so you can run workloads close to your users and data. + +## Current regions + +**Hatchet Cloud** ([cloud.onhatchet.run](https://cloud.onhatchet.run)) is currently deployed in **us-west-2** (Oregon). 
+ +We are expanding availability. Planned or available regions include: + +| Region | Location | Status | +| -------------- | ---------------- | ------------ | +| us-west-2 | Oregon (US) | **Live** | +| us-east-1 | N. Virginia (US) | Private Beta | +| eu-west-1 | Ireland | Private Beta | +| ap-southeast-2 | Sydney | Private Beta | + +## Request a region + +We are always open to rolling out new regions based on demand. If you need a specific region for latency or compliance, [contact us](https://hatchet.run/contact) and we can discuss availability. diff --git a/frontend/docs/pages/home/retry-policies.mdx b/frontend/docs/pages/v1/retry-policies.mdx similarity index 96% rename from frontend/docs/pages/home/retry-policies.mdx rename to frontend/docs/pages/v1/retry-policies.mdx index 43b9b93fcc..8a70777a2f 100644 --- a/frontend/docs/pages/home/retry-policies.mdx +++ b/frontend/docs/pages/v1/retry-policies.mdx @@ -52,7 +52,7 @@ Additionally, if a task interacts with external services or databases, you shoul If you need to access the current retry count within a task, you can use the `retryCount` method available in the task context: - + @@ -71,7 +71,7 @@ If you need to access the current retry count within a task, you can use the `re Hatchet also supports exponential backoff for retries, which can be useful for handling failures in a more resilient manner. Exponential backoff increases the delay between retries exponentially, giving the failing service more time to recover before the next retry. - + @@ -94,7 +94,7 @@ The Hatchet SDKs each expose a `NonRetryable` exception, which allows you to byp 2. A task that contains a single non-idempotent operation that can fail but cannot safely be rerun on failure, such as a billing operation. 3. A failure that requires manual intervention to resolve. 
- + diff --git a/frontend/docs/pages/v1/runnables/_meta.js b/frontend/docs/pages/v1/runnables/_meta.js new file mode 100644 index 0000000000..8767918bb0 --- /dev/null +++ b/frontend/docs/pages/v1/runnables/_meta.js @@ -0,0 +1,11 @@ +export default { + index: { display: "hidden" }, + tasks: { display: "hidden" }, + "run-with-results": { display: "hidden" }, + "run-no-wait": { display: "hidden" }, + "scheduled-runs": { display: "hidden" }, + "cron-runs": { display: "hidden" }, + "bulk-run": { display: "hidden" }, + webhooks: { display: "hidden" }, + "inter-service-triggering": { display: "hidden" }, +}; diff --git a/frontend/docs/pages/v1/runnables/bulk-run.mdx b/frontend/docs/pages/v1/runnables/bulk-run.mdx new file mode 100644 index 0000000000..8632ae5f8f --- /dev/null +++ b/frontend/docs/pages/v1/runnables/bulk-run.mdx @@ -0,0 +1,67 @@ +import { Callout, Card, Cards, Steps, Tabs } from "nextra/components"; +import UniversalTabs from "@/components/UniversalTabs"; +import { Snippet } from "@/components/code"; +import { snippets } from "@/lib/generated/snippets"; + +# Bulk Run Many Tasks + +Often you may want to run a task multiple times with different inputs. There is significant overhead (i.e. network roundtrips) to write the task, so if you're running multiple tasks, it's best to use the bulk run methods. + + + + +You can use the `aio_run_many` method to bulk run a task. This will return a list of results. + + + + + `Workflow.create_bulk_run_item` is a typed helper to create the inputs for + each task. + + +There are additional bulk methods available on the `Workflow` object. + +- `aio_run_many` +- `aio_run_many_no_wait` + +And blocking variants: + +- `run_many` +- `run_many_no_wait` + +As with the run methods, you can call bulk methods from within a task and the runs will be associated with the parent task in the dashboard. + + + + +You can use the `run` method directly to bulk run tasks by passing an array of inputs. This will return a list of results. 
+ + + +There are additional bulk methods available on the `Task` object. + +- `run` +- `runNoWait` + +As with the run methods, you can call bulk methods on the task fn context parameter within a task and the runs will be associated with the parent task in the dashboard. + + + +Available bulk methods on the `Context` object are: - `bulkRunChildren` - `bulkRunChildrenNoWait` + + + + +You can use the `RunMany` method directly on the `Workflow` or `StandaloneTask` instance to bulk run tasks by passing an array of inputs. This will return a list of run IDs. + + + +Additional bulk methods are coming soon for the Go SDK. Join our [Discord](https://hatchet.run/discord) to stay up to date. + + + + + + diff --git a/frontend/docs/pages/v1/runnables/cron-runs.mdx b/frontend/docs/pages/v1/runnables/cron-runs.mdx new file mode 100644 index 0000000000..1b245436af --- /dev/null +++ b/frontend/docs/pages/v1/runnables/cron-runs.mdx @@ -0,0 +1,206 @@ +import { snippets } from "@/lib/generated/snippets"; +import { Snippet } from "@/components/code"; +import { Tabs, Callout } from "nextra/components"; +import UniversalTabs from "@/components/UniversalTabs"; + +# Recurring Runs with Cron + +> This example assumes we have a [task](/v1/tasks) registered on a running [worker](/v1/workers). + +A [Cron](https://en.wikipedia.org/wiki/Cron) is a time-based job scheduler that allows you to define when a task should be executed automatically on a pre-determined schedule. + +Some example use cases for cron-style tasks might include: + +1. Running a daily report at a specific time. +2. Sending weekly digest emails to users about their activity from the past week. +3. Running a monthly billing process to generate invoices for customers. + +Hatchet supports cron triggers to run on a schedule defined in a few different ways: + +- [Task Definitions](/v1/cron-runs#defining-a-cron-in-your-task-definition): Define a cron expression in your task definition to trigger the task on a predefined schedule. 
+- [Dynamic Programmatically](/v1/cron-runs#programmatically-creating-cron-triggers): Use the Hatchet SDKs to dynamically set the cron schedule of a task. +- [Hatchet Dashboard](/v1/cron-runs#managing-cron-jobs-in-the-hatchet-dashboard): Manually create cron triggers from the Hatchet Dashboard. + + + The expression is when Hatchet **enqueues** the task, not when the run starts. + Scheduling constraints like concurrency limits, rate limits, and retry + policies can affect run start times. + + +### Cron Expression Syntax + +Cron expressions in Hatchet follow the standard cron syntax. A cron expression consists of five fields separated by spaces: + +``` +┌───────────── minute (0 - 59) +│ ┌───────────── hour (0 - 23) +│ │ ┌───────────── day of the month (1 - 31) +│ │ │ ┌───────────── month (1 - 12) +│ │ │ │ ┌───────────── day of the week (0 - 6) (Sunday to Saturday) +* * * * * +``` + +Each field can contain a specific value, an asterisk (`*`) to represent all possible values, or a range of values. Here are some examples of cron expressions: + +- `0 0 * * *`: Run every day at midnight +- `*/15 * * * *`: Run every 15 minutes +- `0 9 * * 1`: Run every Monday at 9 AM +- `0 0 1 * *`: Run on the first day of every month at midnight + +## Defining a Cron in Your Task Definition + +You can define a task with a cron schedule by configuring the cron expression as part of the task definition: + + + + + + + + + + + + + + + + + + + +In the examples above, we set the `on cron` property of the task. The property specifies the cron expression that determines when the task should be triggered. + + + Note: When modifying a cron in your task definition, it will override any cron + schedule for previous crons defined in previous task definitions, but crons + created via the API or Dashboard will still be respected. + + +## Programmatically Creating Cron Triggers + +### Create a Cron Trigger + +You can create dynamic cron triggers programmatically via the API. 
This is useful if you want to create a cron trigger that is not known at the time of task definition, + +Here's an example of creating a a cron to trigger a report for a specific customer every day at noon: + + + + + + + + + + + + + + + + + + + +In this example you can have different expressions for different customers, or dynamically set the expression based on some other business logic. + +When creating a cron via the API, you will receive a cron trigger object with a metadata property containing the id of the cron trigger. This id can be used to reference the cron trigger when deleting the cron trigger and is often stored in a database or other persistence layer. + + + Note: Cron Name and Expression are required fields when creating a cron + trigger and we enforce a unique constraint on the two. + + +### Delete a Cron Trigger + +You can delete a cron trigger by passing the cron object or a cron trigger id to the delete method. + + + + + + + + + + + + + + + + + + + + + Note: Deleting a cron trigger will not cancel any currently running instances + of the task. It will simply stop the cron trigger from triggering the task + again. + + +### List Cron Triggers + +Retrieves a list of all task cron triggers matching the criteria. + + + + + + + + + + + + + + + + + + + +## Managing Cron Triggers in the Hatchet Dashboard + +In the Hatchet Dashboard, you can view and manage cron triggers for your tasks. + +Navigate to "Triggers" > "Cron Jobs" in the left sidebar and click "Create Cron Job" at the top right. + +You can specify run parameters such as Input, Additional Metadata, and the Expression. + +![Create Cron Job](/cron-dash.gif) + +## Cron Considerations + +When using cron triggers, there are a few considerations to keep in mind: + +1. **Time Zone**: Cron schedules are UTC. Make sure to consider the time zone when defining your cron expressions. + +2. **Execution Time**: The actual execution time of a cron-triggered task may vary slightly from the scheduled time. 
Hatchet makes a best-effort attempt to enqueue the task as close to the scheduled time as possible, but there may be slight delays due to system load or other factors. + +3. **Missed Schedules**: If a scheduled task is missed (e.g., due to system downtime), Hatchet will **not** automatically run the missed instances. It will wait for the next scheduled time to trigger the task. + +4. **Overlapping Schedules**: If a task is still running when the next scheduled time arrives, Hatchet will start a new instance of the task or respect the [concurrency](/v1/concurrency) policy. diff --git a/frontend/docs/pages/v1/runnables/index.mdx b/frontend/docs/pages/v1/runnables/index.mdx new file mode 100644 index 0000000000..1b4b19ecbf --- /dev/null +++ b/frontend/docs/pages/v1/runnables/index.mdx @@ -0,0 +1,14 @@ +--- +title: Triggers +description: Ways to trigger tasks in Hatchet beyond run and fire-and-forget. +--- + +# Triggers + +Additional trigger patterns for running tasks on schedules, in bulk, via webhooks, or across services. 
+ +- [Scheduled Runs](/v1/scheduled-runs): Run at a specific time +- [Cron Runs](/v1/cron-runs): Run on a recurring schedule +- [Bulk Runs](/v1/bulk-run): Trigger many runs at once +- [Webhooks](/v1/webhooks): Trigger via HTTP webhooks +- [Inter-Service Triggering](/v1/inter-service-triggering): Trigger across services diff --git a/frontend/docs/pages/v1/runnables/inter-service-triggering.mdx b/frontend/docs/pages/v1/runnables/inter-service-triggering.mdx new file mode 100644 index 0000000000..ba4dad015d --- /dev/null +++ b/frontend/docs/pages/v1/runnables/inter-service-triggering.mdx @@ -0,0 +1,62 @@ +import { Callout, Tabs } from "nextra/components"; +import UniversalTabs from "@/components/UniversalTabs"; +import { Snippet } from "@/components/code"; +import { snippets } from "@/lib/generated/snippets"; + +# Invoking Tasks From Other Services + +While Hatchet recommends importing your workflows and standalone tasks directly to use for triggering runs, this only works in a monorepo or similar setups where you have access to those objects. However, it's common to have a polyrepo, have code written in multiple languages, or otherwise not be able to import your workflows and standalone tasks directly. Hatchet provides first-class, type-safe support for handling these cases as well, with only minor code duplication, to allow you to trigger your tasks from anywhere in a type-safe way. + +### Creating a "Stub" Task on your External Service (Recommended) + +The recommended way to trigger a run from a service where you _cannot_ import the workflow or standalone task definition directly is to create a "stub" task or workflow on your external service. This is a Hatchet task or workflow that has the same name and input/output types as the task you want to trigger on your Hatchet worker, but without the function or other configuration. + +This allows you to have a polyglot, fully typed interface with full SDK support. 
+ + + + + + + + + +Consider a task with an implementation like this: + + + +To trigger this task from a separate service, for instance, in a microservices architecture, where the code is not shared, start by defining models that match the input and output types of the task defined above. + + + +Next, create the stub task. + + + +Finally, use the stub to trigger the underlying task, and (optionally) retrieve the result. + + + + + + + + + + + {/* TODO: add ruby snippet */} + + + + + Note that this approach requires code duplication, which can break type + safety. For instance, if the input type to your workflow changes, you need to + remember to also change the type passed to the stub. Some ways to mitigate + risks here are helpful comments reminding developers to keep these types in + sync, code generation tools, and end-to-end tests. + diff --git a/frontend/docs/pages/home/run-no-wait.mdx b/frontend/docs/pages/v1/runnables/run-no-wait.mdx similarity index 95% rename from frontend/docs/pages/home/run-no-wait.mdx rename to frontend/docs/pages/v1/runnables/run-no-wait.mdx index 564a0d1110..93f35487ce 100644 --- a/frontend/docs/pages/home/run-no-wait.mdx +++ b/frontend/docs/pages/v1/runnables/run-no-wait.mdx @@ -5,7 +5,7 @@ import UniversalTabs from "@/components/UniversalTabs"; # Enqueuing a Task Run (Fire and Forget) -> This example assumes we have a [task](./your-first-task.mdx) registered on a running [worker](./workers.mdx). +> This example assumes we have a [task](/v1/tasks) registered on a running [worker](/v1/workers). Another method of triggering a task in Hatchet is to _enqueue_ the task without waiting for it to complete, sometimes known as "fire and forget". This pattern is useful for tasks that take a long time to complete or are not critical to the immediate operation of your application. 
@@ -53,7 +53,7 @@ Note that the type of `input` here is a Pydantic model that matches the input sc Often it is useful to subscribe to the results of a task at a later time. The `run_no_wait` method returns a `WorkflowRunRef` object which includes a listener for the result of the task. - + Use `ref.result()` to block until the result is available: @@ -89,4 +89,4 @@ Navigate to "Task Runs" in the left sidebar and click "Trigger Run" at the top r You can specify run parameters such as Input, Additional Metadata, and the Scheduled Time. -![Create Scheduled Run](../../public/schedule-dash.gif) +![Create Scheduled Run](/schedule-dash.gif) diff --git a/frontend/docs/pages/home/run-with-results.mdx b/frontend/docs/pages/v1/runnables/run-with-results.mdx similarity index 91% rename from frontend/docs/pages/home/run-with-results.mdx rename to frontend/docs/pages/v1/runnables/run-with-results.mdx index e3618909e9..ec5bbbb17a 100644 --- a/frontend/docs/pages/home/run-with-results.mdx +++ b/frontend/docs/pages/v1/runnables/run-with-results.mdx @@ -5,7 +5,7 @@ import UniversalTabs from "@/components/UniversalTabs"; # Running with Results -> This example assumes we have a [task](./your-first-task.mdx) registered on a running [worker](./workers.mdx). +> This example assumes we have a [task](/v1/tasks) registered on a running [worker](/v1/workers). One method for running a task in Hatchet is to run it and wait for its result. Some example use cases for this type of task trigger include: @@ -44,7 +44,7 @@ Note that the type of `input` here is a Pydantic model that matches the input sc You can also spawn tasks from within a task. This is useful for composing tasks together to create more complex workflows, fanning out batched tasks, or creating conditional workflows. - + You can run a task from within a task by calling the `aio_run` method on the task object from within a task function. This will associate the runs in the dashboard for easier debugging. 
@@ -58,7 +58,7 @@ And that's it! The parent task will run and spawn the child task, and then will -You can run a task from within a task by calling the `runChild` method on the `ctx` parameter of the task function. This will associate the runs in the dashboard for easier debugging. +You can spawn a task from within a task by calling the `run` method on the task object from within a task function. This will associate the runs in the dashboard for easier debugging. @@ -79,7 +79,7 @@ You can run a task from within a task by calling the `Run` method on the task ob Sometimes you may want to run multiple tasks concurrently. Here's how to do that in each language: - + Since the `aio_run` method returns a coroutine, you can spawn multiple tasks in parallel and await using `asyncio.gather`. @@ -109,5 +109,5 @@ You can run multiple tasks in parallel by calling `Run` multiple times in gorout While you can run multiple tasks in parallel using the `Run` method, this is not recommended for large numbers of tasks. Instead, we recommend using [bulk - run methods](./bulk-run.mdx) for large parallel task execution. + run methods](/v1/bulk-run) for large parallel task execution. diff --git a/frontend/docs/pages/v1/runnables/scheduled-runs.mdx b/frontend/docs/pages/v1/runnables/scheduled-runs.mdx new file mode 100644 index 0000000000..c184e3418e --- /dev/null +++ b/frontend/docs/pages/v1/runnables/scheduled-runs.mdx @@ -0,0 +1,181 @@ +import { snippets } from "@/lib/generated/snippets"; +import { Snippet } from "@/components/code"; +import { Callout, Card, Cards, Steps, Tabs } from "nextra/components"; +import UniversalTabs from "@/components/UniversalTabs"; + +# Scheduled Runs + +> This example assumes we have a [task](/v1/tasks) registered on a running [worker](/v1/workers). + +Scheduled runs allow you to trigger a task at a specific time in the future. 
Some example use cases of scheduling runs might include: + +- Sending a reminder email at a specific time after a user took an action. +- Running a one-time maintenance task at a predetermined time as determined by your application. For instance, you might want to run a database vacuum during a maintenance window any time a task matches a certain criteria. +- Allowing a customer to decide when they want your application to perform a specific task. For instance, if your application is a simple alarm app that sends a customer a notification at a time that they specify, you might create a scheduled run for each alarm that the customer sets. + +Hatchet supports scheduled runs to run on a schedule defined in a few different ways: + +- [Programmatically](/v1/scheduled-runs#programmatically-creating-scheduled-runs): Use the Hatchet SDKs to dynamically set the schedule of a task. +- [Hatchet Dashboard](/v1/scheduled-runs#managing-scheduled-runs-in-the-hatchet-dashboard): Manually create scheduled runs from the Hatchet Dashboard. + + + The scheduled time is when Hatchet **enqueues** the task, not when the run + starts. Scheduling constraints like concurrency limits, rate limits, and retry + policies can affect run start times. + + +## Programmatically Creating Scheduled Runs + +### Create a Scheduled Run + +You can create dynamic scheduled runs programmatically via the API to run tasks at a specific time in the future. + +Here's an example of creating a scheduled run to trigger a task tomorrow at noon: + + + + + + + + + + + + + + + + + + +In this example you can have different scheduled times for different customers, or dynamically set the scheduled time based on some other business logic. + +When creating a scheduled run via the API, you will receive a scheduled run object with a metadata property containing the id of the scheduled run. 
This id can be used to reference the scheduled run when deleting the scheduled run and is often stored in a database or other persistence layer. + + + Note: Be mindful of the time zone of the scheduled run. Scheduled runs are + **always** stored and returned in UTC. + + +### Deleting a Scheduled Run + +You can delete a scheduled run by calling the `delete` method on the scheduled client. + + + + + + + + + + + + + + + + +### Listing Scheduled Runs + +You can list all scheduled runs for a task by calling the `list` method on the scheduled client. + + + + + + + + + + + + + + + + +### Rescheduling a Scheduled Run + +If you need to change the trigger time for an existing scheduled run, you can reschedule it by updating its `triggerAt`. + + + + + + + + + + + + + + + + + You can only reschedule scheduled runs created via the API (not runs created + via a code-defined schedule), and Hatchet may reject rescheduling if the run + has already triggered. + + +### Bulk operations (delete / reschedule) + +Hatchet supports bulk operations for scheduled runs. You can bulk delete scheduled runs, and you can bulk reschedule scheduled runs by providing a list of updates. + + + + + + + + + + + + + + + + + + +## Managing Scheduled Runs in the Hatchet Dashboard + +In the Hatchet Dashboard, you can view and manage scheduled runs for your tasks. + +Navigate to "Triggers" > "Scheduled Runs" in the left sidebar and click "Create Scheduled Run" at the top right. + +You can specify run parameters such as Input, Additional Metadata, and the Scheduled Time. + +![Create Scheduled Run](/schedule-dash.gif) + +You can also manage existing scheduled runs: + +- **Single-run actions**: Use the per-row actions menu to **Reschedule** or **Delete** an individual scheduled run. +- **Bulk actions**: Use the **Actions** menu to bulk **Delete** or **Reschedule** either: + - The selected rows, or + - All rows matching the current filters (including “all” if no filters are set). 
+ + + In the dashboard, reschedule/delete actions may be disabled for runs that were + created via a code-defined schedule, and rescheduling may be disabled for runs + that have already triggered. + + +## Scheduled Run Considerations + +When using scheduled runs, there are a few considerations to keep in mind: + +1. **Time Zone**: Scheduled runs are stored and returned in UTC. Make sure to consider the time zone when defining your scheduled time. + +2. **Execution Time**: The actual execution time of a scheduled run may vary slightly from the scheduled time. Hatchet makes a best-effort attempt to enqueue the task as close to the scheduled time as possible, but there may be slight delays due to system load or other factors. + +3. **Missed Schedules**: If a scheduled task is missed (e.g., due to system downtime), Hatchet will not automatically run the missed instances when the service comes back online. + +4. **Overlapping Schedules**: If a task is still running when a second scheduled run is scheduled to start, Hatchet will start a new instance of the task or respect [concurrency](/v1/concurrency) policy. diff --git a/frontend/docs/pages/v1/runnables/tasks.mdx b/frontend/docs/pages/v1/runnables/tasks.mdx new file mode 100644 index 0000000000..2089606548 --- /dev/null +++ b/frontend/docs/pages/v1/runnables/tasks.mdx @@ -0,0 +1,107 @@ +import { snippets } from "@/lib/generated/snippets"; +import { Snippet } from "@/components/code"; +import { Callout, Tabs } from "nextra/components"; +import UniversalTabs from "@/components/UniversalTabs"; + +# Tasks + + + Just getting started? Check out the [essentials guide](/v1/tasks) to declare + and run your first task. + + +Everything you run in Hatchet is a **task** - a named function that you can trigger, retry, schedule, and observe. Tasks can be configured to handle the problems that come up in real systems: transient failures, resource contention, overloaded downstream services, and more. 
+ +## Defining a task + +At minimum, a task needs a name and a function. The returned object is a **runnable** - you'll use it directly to [trigger](/v1/running-your-task) the task. + + + + + + + + + + + + + + + + +When you define a task, you are telling Hatchet: "here is a piece of work that a worker can pick up." The task carries a name, the function to run, and optional configuration. Tasks are registered on [workers](/v1/workers), which are the long-running processes that actually execute them. + +## Task lifecycle + +When you trigger a task, it moves through three phases: queued, running, and a terminal state. + +```mermaid +graph LR + Triggered --> QUEUED + QUEUED -->|Assigned to worker| RUNNING + RUNNING -->|Success| COMPLETED + RUNNING -->|Failure + retries left| QUEUED + RUNNING -->|Failure + no retries| FAILED +``` + +A task can also be **CANCELLED** at any point - either explicitly or by a [timeout](/v1/timeouts) expiring. + +## Triggering a task + +The runnable returned by a task definition supports several trigger methods: + +| Method | What it does | +| ---------------------------------------------------- | ------------------------------------------------------- | +| [Run](/v1/running-your-task#run-and-wait) | Trigger the task and wait for the result. | +| [Run no wait](/v1/running-your-task#fire-and-forget) | Enqueue the task and return immediately. | +| [Schedule](/v1/scheduled-runs) | Schedule the task to run at a specific time. | +| [Cron](/v1/cron-runs) | Run the task on a recurring schedule. | +| [Bulk run](/v1/bulk-run) | Trigger many instances of the task at once. | +| [On event](/v1/external-events/run-on-event) | Trigger the task automatically when an event is pushed. | +| [Webhook](/v1/webhooks) | Trigger the task from an external HTTP request. | + +## Configuring a task + +Tasks can be configured to handle common problems in distributed systems. 
For example, you might want to automatically retry a task when an external API returns a transient error, or limit how many instances of a task run at the same time to avoid overwhelming a downstream service. + +| Concept | What it does | +| ---------------------------------------------------------- | ---------------------------------------------------------- | +| [Retries](/v1/retry-policies) | Retry the task on failure, with optional backoff. | +| [Timeouts](/v1/timeouts) | Limit how long a task may wait to be scheduled or to run. | +| [Concurrency](/v1/concurrency) | Limit how many runs of this task execute at once. | +| [Rate limits](/v1/rate-limits) | Throttle task execution over a time window. | +| [Priority](/v1/priority) | Influence scheduling order relative to other queued tasks. | +| [Worker affinity](/v1/advanced-assignment/worker-affinity) | Prefer or require specific workers for this task. | + +## Input and output + +Every task receives an **input** - a JSON-serializable object passed when the task is triggered. The value you return from the task function becomes the task's **output**, which callers receive when they await the result. + +When a task is part of a [workflow](/v1/durable-workflows), its output is also available to downstream tasks through the context object, so data flows naturally from one step to the next. See [Accessing Parent Task Outputs](/v1/patterns/directed-acyclic-graphs#accessing-parent-task-outputs) for details. + +## The context object + +Every task function receives a **context** alongside its input. The context is your handle to the Hatchet runtime while the task is executing. Through it you can perform various operations: + +- **Runtime information** like the task's run ID, workflow ID, and more. +- **Check for cancellation** and respond to it gracefully ([Cancellation](/v1/cancellation)). +- **Refresh timeouts** if a long-running operation needs more time ([Timeouts](/v1/timeouts)). 
+- **Release a worker slot** early to free capacity for other tasks ([Manual Slot Release](/v1/advanced-assignment/manual-slot-release)). + +## How tasks execute on workers + +Tasks don't run on their own - they are assigned to and executed by [workers](/v1/workers). A worker is a long-running process in your infrastructure that registers one or more tasks with Hatchet. When a task is triggered, Hatchet places it in a queue and assigns it to an available worker that has registered that task. + +Each worker has a fixed number of **slots** that determine how many tasks it can run concurrently. When all slots are occupied, new tasks stay queued until a slot opens up. You can control this behavior further with [concurrency limits](/v1/concurrency), [rate limits](/v1/rate-limits), and [priority](/v1/priority). + +If you need tasks to run on specific workers - for example, because a worker has a GPU or a particular model loaded in memory - you can use [worker affinity](/v1/advanced-assignment/worker-affinity) or [sticky assignment](/v1/advanced-assignment/sticky-assignment) to influence where tasks are placed. + +## Tasks vs. workflows + +A task on its own is a standalone runnable - you can trigger it, wait for its result, schedule it, or fire it off without waiting. When you need to coordinate multiple tasks together (run B after A, fan out across N inputs, etc.), you compose them into a [workflow](/v1/durable-workflows). Both share the same trigger interface - the difference is scope. A task does one thing; a workflow orchestrates many things. + +Next, read about how tasks compose into [workflows](/v1/durable-workflows). 
diff --git a/frontend/docs/pages/home/webhooks.mdx b/frontend/docs/pages/v1/runnables/webhooks.mdx similarity index 99% rename from frontend/docs/pages/home/webhooks.mdx rename to frontend/docs/pages/v1/runnables/webhooks.mdx index 1e5bfb3455..c90cac6262 100644 --- a/frontend/docs/pages/home/webhooks.mdx +++ b/frontend/docs/pages/v1/runnables/webhooks.mdx @@ -86,4 +86,4 @@ While you're creating your webhook (and also after you've created it), you can c Once you've done that, the last thing to do is register the event keys you want your workers to listen for so that they can be triggered by incoming webhooks. -For examples on how to do this, see the [documentation on event triggers](./run-on-event.mdx). +For examples on how to do this, see the [documentation on event triggers](/v1/external-events/run-on-event). diff --git a/frontend/docs/pages/v1/running-your-task.mdx b/frontend/docs/pages/v1/running-your-task.mdx new file mode 100644 index 0000000000..719bf07cc0 --- /dev/null +++ b/frontend/docs/pages/v1/running-your-task.mdx @@ -0,0 +1,218 @@ +import { snippets } from "@/lib/generated/snippets"; +import { Snippet } from "@/components/code"; +import { Callout, Card, Cards, Steps, Tabs } from "nextra/components"; +import UniversalTabs from "@/components/UniversalTabs"; + +# Running Tasks + +With your task defined and a worker running, you can import the task wherever you need it and invoke it. + +## Run and wait + +Call a task and block until you get the result back. Use this for synchronous workflows like fan-out, LLM calls, or any time you need the output before continuing. + + + + +You can use your `Task` object to run a task and wait for it to complete by calling the `run` method. This method will block until the task completes and return the result. + + + +You can also `await` the result of `aio_run`: + + + +Note that the type of `input` here is a Pydantic model that matches the input schema of your workflow. 
+ + + + +You can use your `Task` object to run a task and wait for it to complete by calling the `run` method. This method will return a promise that resolves when the task completes and returns the result. + + + + + + +You can use your `Task` object to run a task and wait for it to complete by calling the `Run` method. This method will block until the task completes and return the result. + + + + + + + + + + +### Spawning tasks from within a task + +You can spawn tasks from within a task. This is useful for composing tasks together, fanning out batched tasks, or creating conditional workflows. + + + + +You can run a task from within a task by calling the `aio_run` method on the task object from within a task function. This will associate the runs in the dashboard for easier debugging. + + + +The parent task will run and spawn the child task, then collect the results. + + + + +You can spawn a task from within a task by calling the `run` method on the task object from within a task function. This will associate the runs in the dashboard for easier debugging. + + + + + + +You can run a task from within a task by calling the `Run` method on the task object from within a task function. This will associate the runs in the dashboard for easier debugging. + + + + + + + + + +### Running tasks in parallel + + + + +Since the `aio_run` method returns a coroutine, you can spawn multiple tasks in parallel and await using `asyncio.gather`. + + + + + + +Since the `run` method returns a promise, you can spawn multiple tasks in parallel and await using `Promise.all`. + + + + + + +You can run multiple tasks in parallel by calling `Run` multiple times in goroutines and using a `sync.WaitGroup` to wait for them to complete. + + + + + + + + + + + While you can run multiple tasks in parallel using the `Run` method, this is + not recommended for large numbers of tasks. Instead, use [bulk run + methods](/v1/bulk-run) for large parallel task execution. 
+ + +## Fire and forget + +Enqueue a task without waiting for the result. Use this for background jobs like sending emails, processing uploads, or kicking off long-running pipelines. + + + + + + +You can use your task object to enqueue a task by calling the `run_no_wait` method. This returns a `WorkflowRunRef` without waiting for the result. + + + +You can also `await` the result of `aio_run_no_wait`: + + + +Note that the type of `input` here is a Pydantic model that matches the input schema of your task. + + + + +You can use your task object to enqueue a task by calling the `runNoWait` method. This returns a `WorkflowRunRef` without waiting for the result. + + + + + + +You can use your task object to enqueue a task by calling the `RunNoWait` method. This returns a `WorkflowRunRef` without waiting for the result. + + + + + + + + + + + +### Subscribing to results later + +The `run_no_wait` method returns a `WorkflowRunRef` which includes a listener for the result of the task, so you can subscribe at a later time. + + + + +Use `ref.result()` to block until the result is available: + + + +or await `aio_result`: + + + + + + + + + + + + + + + + +### Triggering from the dashboard + +In the Hatchet Dashboard, navigate to "Task Runs" in the left sidebar and click "Trigger Run" at the top right. You can specify run parameters such as Input, Additional Metadata, and the Scheduled Time. + +![Create Scheduled Run](/schedule-dash.gif) + +## Where you can trigger from + +- **Same codebase or monorepo** - import your task and call `run`, `run_no_wait`, or other trigger methods directly. Your API server, CLI, or another service in the same repo can use the same task definition. +- **External API or separate service (polyrepo)** - when the triggering code can't import the task definition (different repo, language, or microservice), use a **stub**: a Hatchet task with the same name and input/output types but no implementation. 
See [Inter-Service Triggering](/v1/inter-service-triggering) for details. +- **From the CLI** - use the `hatchet run` command to trigger tasks from the command line. +- **From the Dashboard** - use the Hatchet dashboard to trigger tasks from the web interface. + +## Other trigger styles + +Hatchet supports additional trigger patterns for more advanced use cases: + +| Style | Use case | Doc | +| ------------- | ---------------------------------------------------- | ------------------------------------------------- | +| **Scheduled** | Run once at a specific time in the future | [Scheduled Trigger](/v1/scheduled-runs) | +| **Cron** | Run on a recurring schedule (daily, weekly, etc.) | [Cron Trigger](/v1/cron-runs) | +| **Events** | Run when an event is emitted (e.g. webhooks, queues) | [Event Trigger](/v1/external-events/run-on-event) | +| **Bulk** | Run the same task many times with different inputs | [Bulk Run Many](/v1/bulk-run) | +| **Webhooks** | Let external systems trigger workflows via HTTP | [Webhooks](/v1/webhooks) | + +## Next steps + +Now that you can run tasks, explore [Durable Workflows](/v1/durable-workflows) to compose multiple tasks into pipelines with dependencies and checkpointing. diff --git a/frontend/docs/pages/home/scheduled-runs.mdx b/frontend/docs/pages/v1/scheduled-runs.mdx similarity index 90% rename from frontend/docs/pages/home/scheduled-runs.mdx rename to frontend/docs/pages/v1/scheduled-runs.mdx index 7b7fa505e0..d4fb7e17cc 100644 --- a/frontend/docs/pages/home/scheduled-runs.mdx +++ b/frontend/docs/pages/v1/scheduled-runs.mdx @@ -5,7 +5,7 @@ import UniversalTabs from "@/components/UniversalTabs"; # Scheduled Runs -> This example assumes we have a [task](./your-first-task.mdx) registered on a running [worker](./workers.mdx). +> This example assumes we have a [task](/v1/tasks) registered on a running [worker](/v1/workers). Scheduled runs allow you to trigger a task at a specific time in the future. 
Some example use cases of scheduling runs might include: @@ -15,8 +15,8 @@ Scheduled runs allow you to trigger a task at a specific time in the future. Som Hatchet supports scheduled runs to run on a schedule defined in a few different ways: -- [Programmatically](./scheduled-runs.mdx#programmatically-creating-scheduled-runs): Use the Hatchet SDKs to dynamically set the schedule of a task. -- [Hatchet Dashboard](./scheduled-runs.mdx#managing-scheduled-runs-in-the-hatchet-dashboard): Manually create scheduled runs from the Hatchet Dashboard. +- [Programmatically](/v1/scheduled-runs#programmatically-creating-scheduled-runs): Use the Hatchet SDKs to dynamically set the schedule of a task. +- [Hatchet Dashboard](/v1/scheduled-runs#managing-scheduled-runs-in-the-hatchet-dashboard): Manually create scheduled runs from the Hatchet Dashboard. The scheduled time is when Hatchet **enqueues** the task, not when the run @@ -62,7 +62,7 @@ When creating a scheduled run via the API, you will receive a scheduled run obje You can delete a scheduled run by calling the `delete` method on the scheduled client. - + @@ -81,7 +81,7 @@ You can delete a scheduled run by calling the `delete` method on the scheduled c You can list all scheduled runs for a task by calling the `list` method on the scheduled client. - + @@ -100,7 +100,7 @@ You can list all scheduled runs for a task by calling the `list` method on the s If you need to change the trigger time for an existing scheduled run, you can reschedule it by updating its `triggerAt`. - + @@ -126,7 +126,7 @@ If you need to change the trigger time for an existing scheduled run, you can re Hatchet supports bulk operations for scheduled runs. You can bulk delete scheduled runs, and you can bulk reschedule scheduled runs by providing a list of updates. - + @@ -178,4 +178,4 @@ When using scheduled runs, there are a few considerations to keep in mind: 3. 
**Missed Schedules**: If a scheduled task is missed (e.g., due to system downtime), Hatchet will not automatically run the missed instances when the service comes back online. -4. **Overlapping Schedules**: If a task is still running when a second scheduled run is scheduled to start, Hatchet will start a new instance of the task or respect [concurrency](./concurrency.mdx) policy. +4. **Overlapping Schedules**: If a task is still running when a second scheduled run is scheduled to start, Hatchet will start a new instance of the task or respect [concurrency](/v1/concurrency) policy. diff --git a/frontend/docs/pages/v1/security.mdx b/frontend/docs/pages/v1/security.mdx new file mode 100644 index 0000000000..533b8ccc48 --- /dev/null +++ b/frontend/docs/pages/v1/security.mdx @@ -0,0 +1,36 @@ +# Security + +This page points you to Hatchet's security resources and highlights the most important security considerations for Hatchet Cloud and self-hosted deployments. + +## Trust center + +Hatchet is SOC 2 Type II, HIPAA, and GDPR compliant. Company-level security practices, compliance reports, and security documentation are available at the **[Hatchet Trust Center](https://trust.hatchet.run/)**. + +## Same source, same security + +Hatchet Cloud and self-hosted Hatchet run the same codebase. The open source project is 100% MIT licensed and undergoes regular third-party penetration testing. Findings are remediated across both deployment models, so security improvements benefit all users equally. + +## Hatchet Cloud + +Hatchet Cloud is Hatchet's managed service: + +- **Encryption in transit**: all API and worker traffic is encrypted with TLS. gRPC connections between workers and the engine use TLS by default. +- **Encryption at rest**: data stored in Hatchet Cloud is encrypted at rest. +- **Tenant isolation**: each tenant's data is logically isolated. Requests are authenticated and scoped to a single tenant. 
+- **Authentication**: API tokens are scoped per-tenant with configurable expiration. The dashboard supports SSO via Google, GitHub, and more coming soon. +- **Penetration testing**: Hatchet Cloud is regularly tested by independent security firms. Findings are tracked and remediated on a defined timeline. +- **Infrastructure**: Hatchet Cloud runs on AWS with private networking, automated patching, and centralized logging. + +For the definitive controls, policies, and compliance reports, refer to the **[Hatchet Trust Center](https://trust.hatchet.run/)**. + +## Self-hosted + +When you self-host Hatchet, your security posture depends on how you deploy and operate the Hatchet services and their dependencies. A practical baseline: + +- **Put TLS in front of the API**: terminate TLS at your ingress/load balancer (or directly on the API) and only expose it to the networks that need it. +- **Treat tokens and DB credentials as secrets**: use a secrets manager and rotate credentials; avoid committing secrets into git or baking them into images. +- **Limit network reachability**: restrict access to the Hatchet API and PostgreSQL to trusted networks (VPC, private subnets, or Kubernetes network policies). +- **Use least privilege**: run Hatchet with the minimum DB permissions needed; don't reuse "admin" DB credentials. +- **Stay current**: keep Hatchet and dependencies up to date to pick up security fixes. + +See [Self Hosting](/self-hosting) for deployment and configuration guidance, or [contact us](https://hatchet.run/contact) for help. 
diff --git a/frontend/docs/pages/v1/sleep.mdx b/frontend/docs/pages/v1/sleep.mdx new file mode 100644 index 0000000000..362bd4c451 --- /dev/null +++ b/frontend/docs/pages/v1/sleep.mdx @@ -0,0 +1,97 @@ +import { snippets } from "@/lib/generated/snippets"; +import { Snippet } from "@/components/code"; +import { Callout, Tabs } from "nextra/components"; +import UniversalTabs from "@/components/UniversalTabs"; +import LongWaitDiagram from "@/components/LongWaitDiagramWrapper"; + +# Sleep & Delays + +Sleep pauses a task for a specified duration while freeing the worker slot. No resources are consumed during the wait, whether the pause lasts seconds or weeks. + + + +Both durable tasks and DAGs support sleeping, but the API differs: durable tasks call `SleepFor` dynamically at runtime, while DAGs declare a sleep condition upfront on the task definition. + + + + +## Durable Sleep + +Durable sleep pauses execution for a specified amount of time and frees the worker slot until the sleep expires. + + + Sleeping puts the task into an [evictable + state](/v1/durable-workflows/task-eviction), the worker slot is freed and the + task is re-queued when the sleep expires. + + +Unlike a language-level sleep (e.g. `time.sleep` in Python or `setTimeout` in Node), durable sleep is guaranteed to respect the original duration across interruptions. A language-level sleep ties the wait to the local process, so if the process restarts, the sleep starts over from zero. + +For example, say you'd like to send a notification to a user after 24 hours. With `time.sleep`, if the task is interrupted after 23 hours, it will restart and sleep for 24 hours again (47 hours total). With durable sleep, Hatchet tracks the original deadline server-side, so the task will only sleep for 1 more hour on restart. + +### Using durable sleep + +Durable sleep can be used by calling the `SleepFor` method on the `DurableContext` object. This method takes a duration as an argument and will sleep for that duration. 
+ + + + + + + + + + + + + + + + + + + + + + + + + +## Sleep Conditions + +Sleep conditions pause a DAG task for a specified duration before it runs. Use them when a task should wait for a fixed amount of time after its parent tasks complete. + +Unlike durable sleep (which is called dynamically at runtime), DAG sleep conditions are declared upfront on the task definition. Both free the worker slot during the wait. + +### Using sleep conditions + +Declare a task with a `wait_for` sleep condition. The task will wait for its parent tasks to complete, then sleep for the specified duration before executing. + + + + + + + + + + + + + + + + +This task will first wait for its parent to complete, then sleep for the specified duration before executing. + +### Combining with other conditions + +Sleep conditions can be combined with other conditions using or groups. For example, you can wait for _either_ a sleep duration or an event (whichever comes first). See [Combining Conditions](/v1/durable-workflows/conditions#or-groups) for details. + + + diff --git a/frontend/docs/pages/v1/streaming.mdx b/frontend/docs/pages/v1/streaming.mdx new file mode 100644 index 0000000000..dd286869d3 --- /dev/null +++ b/frontend/docs/pages/v1/streaming.mdx @@ -0,0 +1,111 @@ +import { snippets } from "@/lib/generated/snippets"; +import { Snippet } from "@/components/code"; +import { Callout, Card, Cards, Steps, Tabs } from "nextra/components"; +import UniversalTabs from "@/components/UniversalTabs"; + +# Streaming in Hatchet + +Hatchet tasks can stream data back to a consumer in real-time. This has a number of valuable uses, such as streaming the results of an LLM call back from a Hatchet worker to a frontend or sending progress updates as a task chugs along. + +## Publishing Stream Events + +You can stream data out of a task run by using the `put_stream` (or equivalent) method on the `Context`. 
+ + + + + + + + + + + + + + + + + + + + + + +This task will stream small chunks of content through Hatchet, which can then be consumed elsewhere. Here we use some text as an example, but this is intended to replicate streaming the results of an LLM call back to a consumer. + +## Consuming Streams + +You can easily consume stream events by using the stream method on the workflow run ref that the various [fire-and-forget](/v1/running-your-task#fire-and-forget) methods return. + + + + + + + + + + + + + + + + + + + + + + +In the examples above, this will result in the famous text below being gradually printed to the console, bit by bit. + +``` +Happy families are all alike; every unhappy family is unhappy in its own way. + +Everything was in confusion in the Oblonskys' house. The wife had discovered that the husband was carrying on an intrigue with a French girl, who had been a governess in their family, and she had announced to her husband that she could not go on living in the same house with him. +``` + + + You must begin consuming the stream before any events are published. Any + events published before a consumer is initialized will be dropped. In + practice, this will not be an issue in most cases, but adding a short sleep + before beginning streaming results back can help. + + +## Streaming to a Web Application + +It's common to want to stream events out of a Hatchet task and back to the frontend of your application, for consumption by an end user. As mentioned before, some clear cases where this is useful would be for streaming back progress of some long-running task for a customer to monitor, or streaming back the results of an LLM call. + +In both cases, we recommend using your application's backend as a proxy for the stream, where you would subscribe to the stream of events from Hatchet, and then stream events through to the frontend as they're received by the backend. 
+ + + + +For example, with FastAPI, you'd do the following: + + + + + + +For example, with NextJS backend-as-frontend, you'd do the following: + + + + + + +For example, with Go's built-in HTTP server, you'd do the following: + + + + + + + {/* TODO: add ruby snippet */} + + + +Then, assuming you run the server on port `8000`, running `curl -N http://localhost:8000/stream` would result in the text streaming back to your console from Hatchet through your FastAPI proxy. diff --git a/frontend/docs/pages/v1/task-eviction.mdx b/frontend/docs/pages/v1/task-eviction.mdx new file mode 100644 index 0000000000..bc22d9a9c4 --- /dev/null +++ b/frontend/docs/pages/v1/task-eviction.mdx @@ -0,0 +1,95 @@ +import { Callout, Tabs } from "nextra/components"; +import UniversalTabs from "@/components/UniversalTabs"; + +# Resource Management During Waits + +When a task needs to wait (for time, an event, or child results), how does Hatchet handle the worker slot? The answer depends on which pattern you're using. + + + + +## Task Eviction + +When a durable task enters a wait, whether from `SleepFor`, `WaitForEvent`, or `WaitFor`, Hatchet **evicts** the task from the worker. The worker slot is released, the task's progress is persisted in the durable event log, and the task does not consume slots or hold resources while it is idle. + +This is what makes durable tasks fundamentally different from regular tasks: a regular task consumes a slot for the entire duration of execution, even if it's just sleeping. A durable task gives the slot back the moment it starts waiting. + +### How eviction works + +```mermaid +graph LR + QUEUED -->|Assigned to worker| RUNNING + RUNNING -->|Hits SleepFor / WaitForEvent| EVICTED + EVICTED -->|Wait completes or event arrives| QUEUED +``` + +1. **Task reaches a wait.** The durable task calls `SleepFor`, `WaitForEvent`, or `WaitFor`. +2. **Checkpoint is written.** Hatchet records the current progress in the durable event log. +3. 
**Worker slot is freed.** The task is evicted from the worker. The slot is immediately available for other tasks. +4. **Wait completes.** When the sleep expires or the expected event arrives, Hatchet re-queues the task. +5. **Task resumes on any available worker.** A worker picks up the task, replays the event log to the last checkpoint, and continues execution from where it left off. + +The resumed task does not need to run on the same worker that originally started it. Any worker that has registered the task can pick it up. + +### Why eviction matters + +Without eviction, a task that sleeps for 24 hours would consume a slot for the entire duration, wasting capacity that could be running other work. With eviction, the slot is freed immediately. + +This is especially important for: + +- **Long waits**: Tasks that sleep for hours or days should not hold slots. +- **Human-in-the-loop**: Waiting for a human to approve or respond could take minutes or weeks. Eviction ensures no resources are held in the meantime. +- **Large fan-outs**: A parent task that spawns thousands of children and waits for results can release its slot while the children run, preventing deadlocks where the parent holds resources that the children need. + +### Separate slot pools + +Durable tasks consume slots from a **separate slot pool** from regular tasks. This prevents a common deadlock: if durable and regular tasks shared the same pool, a durable task waiting on child tasks could hold the very slot those children need to execute. + +By isolating slot pools, Hatchet ensures that durable tasks waiting on children never starve the workers that need to run those children. + +### Eviction and determinism + +Because a task may be evicted and resumed on a different worker at any time, the code between checkpoints must be [deterministic](/v1/patterns/mixing-patterns#determinism-in-durable-tasks). On resume, Hatchet replays the event log; it does not re-execute completed operations.
If the code has changed between the original run and the replay, the checkpoint sequence may not match, leading to unexpected behavior. + + + + +## No Eviction Needed + +DAG tasks do not require eviction because they are **never assigned to a worker until they can actually run**. A worker slot is only allocated when all of the task's conditions are met: parent tasks have completed, sleep durations have elapsed, and expected events have arrived. + +This means resources are only consumed during active execution, never during waits. + +### How DAG scheduling works + +```mermaid +graph LR + PENDING -->|"All conditions met (parents, sleep, events)"| QUEUED + QUEUED -->|Assigned to worker| RUNNING + RUNNING -->|Completes| COMPLETED +``` + +1. **Task is pending.** The task exists in the workflow but is not queued. No worker slot is allocated. No resources are consumed. +2. **Conditions are met.** All parent tasks have completed, any sleep duration has elapsed, and any required events have arrived. +3. **Task is queued.** Only now does Hatchet place the task in the queue for worker assignment. +4. **Task runs to completion.** A worker picks up the task, executes it, and the slot is freed. + +### Why this matters + +Because DAG tasks are only scheduled when ready, there is no wasted capacity: + +- **Sleep conditions** — A task that waits 24 hours after its parent completes does not hold a slot. It sits in a pending state until the timer expires, then gets queued. +- **Event conditions** — A task waiting for an external event consumes no resources. When the event arrives, the task is queued and assigned a slot. +- **Parent dependencies** — Tasks waiting on upstream results are not queued until those results are available. + +This is one of the advantages of DAGs: the scheduling model is simpler. You declare the conditions upfront, and Hatchet handles the timing. 
There is no eviction, no checkpointing, and no replay, because the task never starts until it's ready to run straight through. + + + If you need a task to start running and then pause partway through (for + example, to wait for an event based on intermediate results), use a [durable + task](/v1/patterns/durable-task-execution) instead. DAG tasks run from start + to finish once scheduled. + + + + diff --git a/frontend/docs/pages/v1/tasks.mdx b/frontend/docs/pages/v1/tasks.mdx new file mode 100644 index 0000000000..300dac2758 --- /dev/null +++ b/frontend/docs/pages/v1/tasks.mdx @@ -0,0 +1,102 @@ +import { snippets } from "@/lib/generated/snippets"; +import { Snippet } from "@/components/code"; +import { Callout, Tabs } from "nextra/components"; +import UniversalTabs from "@/components/UniversalTabs"; + +# Tasks + +Everything you run in Hatchet is a **task** - a named function that you can trigger, retry, schedule, and observe. Tasks can be configured to handle the problems that come up in real systems: transient failures, resource contention, overloaded downstream services, and more. + +## Defining a task + +At minimum, a task needs a name and a function. The returned object is a **runnable** - you'll use it directly to [trigger](/v1/running-your-task) the task. + + + + + + + + + + + + + + + + +When you define a task, you are telling Hatchet: "here is a piece of work that a worker can pick up." The task carries a name, the function to run, and optional configuration. Tasks are registered on [workers](/v1/workers), which are the long-running processes that actually execute them. + +## Task lifecycle + +When you trigger a task, it moves through three phases: queued, running, and a terminal state. 
+ +```mermaid +graph LR + Triggered --> QUEUED + QUEUED -->|Assigned to worker| RUNNING + RUNNING -->|Success| COMPLETED + RUNNING -->|Failure + retries left| QUEUED + RUNNING -->|Failure + no retries| FAILED +``` + +A task can also be **CANCELLED** at any point - either explicitly or by a [timeout](/v1/timeouts) expiring. + +## Triggering a task + +The runnable returned by a task definition supports several trigger methods: + +| Method | What it does | +| ---------------------------------------------------- | ------------------------------------------------------- | +| [Run](/v1/running-your-task#run-and-wait) | Trigger the task and wait for the result. | +| [Run no wait](/v1/running-your-task#fire-and-forget) | Enqueue the task and return immediately. | +| [Schedule](/v1/scheduled-runs) | Schedule the task to run at a specific time. | +| [Cron](/v1/cron-runs) | Run the task on a recurring schedule. | +| [Bulk run](/v1/bulk-run) | Trigger many instances of the task at once. | +| [On event](/v1/external-events/run-on-event) | Trigger the task automatically when an event is pushed. | +| [Webhook](/v1/webhooks) | Trigger the task from an external HTTP request. | + +## Configuring a task + +Tasks can be configured to handle common problems in distributed systems. For example, you might want to automatically retry a task when an external API returns a transient error, or limit how many instances of a task run at the same time to avoid overwhelming a downstream service. + +| Concept | What it does | +| ---------------------------------------------------------- | ---------------------------------------------------------- | +| [Retries](/v1/retry-policies) | Retry the task on failure, with optional backoff. | +| [Timeouts](/v1/timeouts) | Limit how long a task may wait to be scheduled or to run. | +| [Concurrency](/v1/concurrency) | Limit how many runs of this task execute at once. | +| [Rate limits](/v1/rate-limits) | Throttle task execution over a time window. 
| +| [Priority](/v1/priority) | Influence scheduling order relative to other queued tasks. | +| [Worker affinity](/v1/advanced-assignment/worker-affinity) | Prefer or require specific workers for this task. | + +## Input and output + +Every task receives an **input** - a JSON-serializable object passed when the task is triggered. The value you return from the task function becomes the task's **output**, which callers receive when they await the result. + +When a task is part of a [workflow](/v1/durable-workflows), its output is also available to downstream tasks through the context object, so data flows naturally from one step to the next. See [Accessing Parent Task Outputs](/v1/patterns/directed-acyclic-graphs#accessing-parent-task-outputs) for details. + +## The context object + +Every task function receives a **context** alongside its input. The context is your handle to the Hatchet runtime while the task is executing. Through it you can perform various operations: + +- **Runtime information** like the task's run ID, workflow ID, and more. +- **Check for cancellation** and respond to it gracefully ([Cancellation](/v1/cancellation)). +- **Refresh timeouts** if a long-running operation needs more time ([Timeouts](/v1/timeouts)). +- **Release a worker slot** early to free capacity for other tasks ([Manual Slot Release](/v1/advanced-assignment/manual-slot-release)). + +## How tasks execute on workers + +Tasks don't run on their own - they are assigned to and executed by [workers](/v1/workers). A worker is a long-running process in your infrastructure that registers one or more tasks with Hatchet. When a task is triggered, Hatchet places it in a queue and assigns it to an available worker that has registered that task. + +Each worker has a fixed number of **slots** that determine how many tasks it can run concurrently. When all slots are occupied, new tasks stay queued until a slot opens up. 
You can control this behavior further with [concurrency limits](/v1/concurrency), [rate limits](/v1/rate-limits), and [priority](/v1/priority). + +If you need tasks to run on specific workers - for example, because a worker has a GPU or a particular model loaded in memory - you can use [worker affinity](/v1/advanced-assignment/worker-affinity) or [sticky assignment](/v1/advanced-assignment/sticky-assignment) to influence where tasks are placed. + +## Tasks vs. workflows + +A task on its own is a standalone runnable - you can trigger it, wait for its result, schedule it, or fire it off without waiting. When you need to coordinate multiple tasks together (run B after A, fan out across N inputs, etc.), you compose them into a [workflow](/v1/durable-workflows). Both share the same trigger interface - the difference is scope. A task does one thing; a workflow orchestrates many things. + +Next, read about how tasks compose into [workflows](/v1/durable-workflows). diff --git a/frontend/docs/pages/home/timeouts.mdx b/frontend/docs/pages/v1/timeouts.mdx similarity index 98% rename from frontend/docs/pages/home/timeouts.mdx rename to frontend/docs/pages/v1/timeouts.mdx index fd41f153d6..077e2cbad2 100644 --- a/frontend/docs/pages/home/timeouts.mdx +++ b/frontend/docs/pages/v1/timeouts.mdx @@ -72,7 +72,7 @@ In some cases, you may need to extend the timeout for a step while it is running For example: - + diff --git a/frontend/docs/pages/v1/troubleshooting/_meta.js b/frontend/docs/pages/v1/troubleshooting/_meta.js new file mode 100644 index 0000000000..8bfbec6ddd --- /dev/null +++ b/frontend/docs/pages/v1/troubleshooting/_meta.js @@ -0,0 +1,3 @@ +export default { + index: "Troubleshooting Workers", +}; diff --git a/frontend/docs/pages/v1/troubleshooting/index.mdx b/frontend/docs/pages/v1/troubleshooting/index.mdx new file mode 100644 index 0000000000..55f0a9292f --- /dev/null +++ b/frontend/docs/pages/v1/troubleshooting/index.mdx @@ -0,0 +1,59 @@ +import { Tabs, Callout } from 
"nextra/components"; + +# Troubleshooting Hatchet Workers + +This guide covers common issues when deploying and operating Hatchet workers. + +## Quick debugging checklist + +Before diving into specific issues, run through these checks: + +1. **Verify your API token** — make sure `HATCHET_CLIENT_TOKEN` matches the token generated in the Hatchet dashboard for your tenant. +2. **Check worker logs** — look for connection errors, heartbeat failures, or crash traces in your worker output. +3. **Check the dashboard** — navigate to the Workers tab to see if your worker is registered and healthy. +4. **Confirm network connectivity** — workers need to reach the Hatchet engine over gRPC. Firewalls, VPNs, or missing TLS configuration can block this. +5. **Check SDK version** — ensure your SDK version is compatible with your engine version. Mismatches can cause subtle failures. + +## Could not send task to worker + +If you see this error in the event history of a task, it could mean several things: + +1. The worker is closing its network connection while the task is being sent. This could be caused by the worker crashing or going offline. + +2. The payload is too large for the worker to accept or the Hatchet engine to send. The default maximum payload size is 4MB. Consider reducing the size of the input data or output data of your tasks. + +3. The worker has a large backlog of tasks in-flight on the network connection and is rejecting new tasks. This can occur if workers are geographically distant from the Hatchet engine or if there are network issues causing delays. Hatchet Cloud runs by default in `us-west-2` (Oregon, USA), so consider deploying your workers in a region close to that for the best performance. + + If you are self-hosting, you can increase the maximum backlog size via the `SERVER_GRPC_WORKER_STREAM_MAX_BACKLOG_SIZE` environment variable in your Hatchet engine configuration. The default is 20. 
+ +## No workers visible in dashboard + +If you have deployed workers but they are not visible in the Hatchet dashboard, it is likely that: + +1. Your API token is invalid or incorrect. Ensure that the token you are using to start the worker matches the token generated in the Hatchet dashboard for your tenant. + +2. Worker heartbeats are not reaching the Hatchet engine. You will see noisy logs in the worker output if this is the case. + +## Tasks stuck in QUEUED state + +If tasks remain in the `QUEUED` state and never move to `RUNNING`: + +1. **No workers registered for the task**: check the Workers tab in the dashboard and confirm a worker is registered that handles the task name. If you recently renamed a task, make sure the worker has been restarted with the updated code. + +2. **All worker slots are full**: if every slot is occupied by other tasks, new tasks will wait in the queue. Check worker utilization in the dashboard or increase the [slot count](/v1/workers#slots). + +3. **Concurrency or rate limit is blocking**: if you've configured [concurrency limits](/v1/concurrency) or [rate limits](/v1/rate-limits), tasks may be held back intentionally. Review your configuration. + +## Worker keeps disconnecting + +If your worker repeatedly connects and then drops: + +1. **Resource exhaustion**: the worker process may be running out of memory or CPU and getting killed by the OS or orchestrator (OOM kill). Check system logs and increase resource limits. + +2. **Network instability**: intermittent connectivity between the worker and the Hatchet engine will cause reconnection cycles. Check for packet loss or high latency between the worker and the engine. + +3. **Graceful shutdown not configured**: if your deployment platform sends `SIGTERM` and the worker doesn't handle it, in-flight tasks may be interrupted. Ensure your worker handles shutdown signals and gives tasks time to complete.
+ +## Phantom workers active in dashboard + +This is often due to workers still running in your deployed environment. We see this most often with very long termination periods for workers, or in local development environments where worker processes are leaking. If you are in a local development environment, you can usually view running Hatchet worker processes via `ps aux | grep worker` (or whatever your entrypoint binary is called) and kill them manually. diff --git a/frontend/docs/pages/v1/uptime.mdx b/frontend/docs/pages/v1/uptime.mdx new file mode 100644 index 0000000000..a8188e10ca --- /dev/null +++ b/frontend/docs/pages/v1/uptime.mdx @@ -0,0 +1,20 @@ +# Uptime and status + +For Hatchet Cloud availability and incident updates, use the status page. For self-hosted deployments, availability depends on your own infrastructure. + +## Hatchet Cloud status page + +Use **[status.hatchet.run](https://status.hatchet.run/)** for real-time status and incident history for Hatchet Cloud and related services: + +- **API**: Hatchet API availability +- **Hatchet Cloud**: `cloud.onhatchet.run` +- **Website**: `hatchet.run` and documentation sites + +You can also subscribe to updates (email/SMS/etc.) directly from the status page. + +## Self-hosted deployments + +If you self-host Hatchet, you’re responsible for uptime, monitoring, backups, and upgrade procedures.
+ +- **Deployment guidance**: [Self Hosting](/self-hosting) +- **Redundancy & failover**: [High Availability](/self-hosting/high-availability) diff --git a/frontend/docs/pages/home/coding-agents.mdx b/frontend/docs/pages/v1/using-coding-agents.mdx similarity index 100% rename from frontend/docs/pages/home/coding-agents.mdx rename to frontend/docs/pages/v1/using-coding-agents.mdx diff --git a/frontend/docs/pages/v1/webhooks.mdx b/frontend/docs/pages/v1/webhooks.mdx new file mode 100644 index 0000000000..c90cac6262 --- /dev/null +++ b/frontend/docs/pages/v1/webhooks.mdx @@ -0,0 +1,89 @@ +import { Callout, Card, Cards, Steps, Tabs } from "nextra/components"; +import UniversalTabs from "@/components/UniversalTabs"; + +# Webhooks + + + This feature is currently in development and might change. Reach out for + feedback or if you encounter any problems registering any external webhooks. + + +Webhooks allow external systems to trigger Hatchet workflows by sending HTTP requests to dedicated endpoints. This enables real-time integration with third-party services like GitHub, Stripe, Slack, or any system that can send webhook events. + +## Creating a webhook + +To create a webhook, you'll need to fill out some fields that tell Hatchet how to determine which workflows to trigger from your webhook, and how to validate it when it arrives from the sender. In particular, you'll need to provide the following fields: + +#### Name + +The **Webhook Name** is tenant-unique (meaning a single tenant can only use each name once), and is used to create the URL for where the incoming webhook request should be sent. For instance, if your tenant id was `d60181b7-da6c-4d4c-92ec-8aa0fc74b3e5` and your webhook name was `my-webhook`, then the URL might look like `https://cloud.onhatchet.run/api/v1/stable/tenants/d60181b7-da6c-4d4c-92ec-8aa0fc74b3e5/webhooks/my-webhook`. Note that you can copy this URL in the dashboard. 
+ +#### Source + +The **Source** indicates the source of the webhook, which can be a pre-provided one for easy setup like Stripe or Github, or a "generic" one, which lets you configure all of the necessary fields for your webhook integration based on what the webhook sender provides. + +#### Event Key Expression + +The **Event Key Expression** is a [CEL](https://cel.dev/) expression that you can use to create a dynamic event key from the payload and headers of the incoming webhook. You can either set this to a constant value, like `webhook`, or you could set it to something dynamic using those two options. Some examples: + +1. `'stripe:' + input.type` would create event keys where `'stripe:'` is a prefix for all keys indicating the webhook came from Stripe, and `input.type` selects the `type` field off of the webhook payload and uses it to create the final event key. The result might look something like `stripe:payment_intent.created`. +2. `'github:' + headers['x-github-event'] + ':' + input.action` could create a key like `github:star:created` + + + The result of the event key expression is what Hatchet will use as the event + key, so you'd need to set a matching event key as a trigger on your workflows + in order to trigger them from the webhooks you create. For instance, you might + add `on_events=["stripe:payment_intent.created"]` to listen for payment intent + created events in the previous example. + + +#### Scope Expression (Optional) + +The **Scope Expression** is an optional [CEL](https://cel.dev/) expression that evaluates to a string used to filter which workflows to trigger. This is useful when you have multiple workflows listening to the same event key but want to route to specific workflows based on the webhook content. + +Like the event key expression, you have access to `input` (the webhook payload) and `headers` (the request headers). Some examples: + +1. `input.customer_id` would use the customer ID from the payload as the scope +2. 
`headers['x-organization-id']` would use a header value as the scope +3. `input.metadata.environment` could route to different workflows based on environment + +#### Static Payload (Optional) + +The **Static Payload** is an optional JSON object that gets merged with the incoming webhook payload before it's passed to your workflows. This is useful for: + +- Adding constant metadata to all events from this webhook +- Injecting configuration values that aren't in the original payload +- Overriding specific fields from the incoming payload + + + When there's a key collision between the incoming webhook payload and the + static payload, the static payload values take precedence. + + +For example, if you set a static payload of `{"source": "stripe", "environment": "production"}` and receive a webhook with `{"type": "payment_intent.created", "source": "api"}`, the final payload passed to your workflow would be `{"type": "payment_intent.created", "source": "stripe", "environment": "production"}`. + +#### Authentication + +Finally, you'll need to specify how Hatchet should authenticate incoming webhook requests. For non-generic sources like Stripe and GitHub, Hatchet has presets for most of the fields, so in most cases you'd only need to provide a secret. + +If you're using a generic source, then you'll need to specify an authentication method (either basic auth, an API key, or HMAC-based auth), and provide the required fields (such as a username and password in the basic auth case). + + + Hatchet encrypts any secrets you provide for validating incoming webhooks. 
+ + +The different authentication methods require different fields to be provided: + +- **Pre-configured sources** (Stripe, GitHub, Slack): Only require a webhook secret +- **Generic sources** require different fields depending on the selected authentication method: + - **Basic Auth**: Requires a username and password + - **API Key**: Requires the header name that contains the key on incoming requests, and the secret key itself + - **HMAC**: Requires the header name that contains the HMAC signature on incoming requests, the signing secret itself, an encoding method (e.g. hex, base64), and an algorithm (e.g. `SHA256`, `SHA1`, etc.). + +## Usage + +While you're creating your webhook (and also after you've created it), you can copy the webhook URL, which is what you'll provide to the webhook _sender_. + +Once you've done that, the last thing to do is register the event keys you want your workers to listen for so that they can be triggered by incoming webhooks. + +For examples on how to do this, see the [documentation on event triggers](/v1/external-events/run-on-event). diff --git a/frontend/docs/pages/v1/worker-healthchecks.mdx b/frontend/docs/pages/v1/worker-healthchecks.mdx new file mode 100644 index 0000000000..cd0858befd --- /dev/null +++ b/frontend/docs/pages/v1/worker-healthchecks.mdx @@ -0,0 +1,79 @@ +# Worker Health Checks + +The Python SDK allows you to enable a health check endpoint that you can ping to check on the status of your worker. + +### Usage + +First, set the `HATCHET_CLIENT_WORKER_HEALTHCHECK_ENABLED` environment variable to `True`. Once that flag is set, two health check endpoints will be available (on port `8001` by default): + +1. `/health` - Returns **200** with body `{"status":"HEALTHY"}` when the worker listener is healthy, otherwise **503** with body `{"status":"UNHEALTHY"}`. +2. `/metrics` - A metrics endpoint intended to be used by a monitoring system like Prometheus. + +### Custom Port + +You can set a custom port with the `HATCHET_CLIENT_WORKER_HEALTHCHECK_PORT` environment variable, e.g. 
`HATCHET_CLIENT_WORKER_HEALTHCHECK_PORT=8002`. + +### Event loop blocked threshold + +If the worker listener process event loop becomes blocked for longer than a threshold, `/health` will return **503**. + +You can configure this threshold (in seconds) with: + +- `HATCHET_CLIENT_WORKER_HEALTHCHECK_EVENT_LOOP_BLOCK_THRESHOLD_SECONDS` (default: `5.0`) + +#### Example request to `/health`: + +```bash +curl localhost:8001/health + +{"status":"HEALTHY"} +``` + +#### Example request to `/metrics`: + +```bash +curl localhost:8001/metrics + +# HELP python_gc_objects_collected_total Objects collected during gc +# TYPE python_gc_objects_collected_total counter +python_gc_objects_collected_total{generation="0"} 18782.0 +python_gc_objects_collected_total{generation="1"} 4907.0 +python_gc_objects_collected_total{generation="2"} 244.0 +# HELP python_gc_objects_uncollectable_total Uncollectable objects found during GC +# TYPE python_gc_objects_uncollectable_total counter +python_gc_objects_uncollectable_total{generation="0"} 0.0 +python_gc_objects_uncollectable_total{generation="1"} 0.0 +python_gc_objects_uncollectable_total{generation="2"} 0.0 +# HELP python_gc_collections_total Number of times this generation was collected +# TYPE python_gc_collections_total counter +python_gc_collections_total{generation="0"} 308.0 +python_gc_collections_total{generation="1"} 27.0 +python_gc_collections_total{generation="2"} 2.0 +# HELP python_info Python platform information +# TYPE python_info gauge +python_info{implementation="CPython",major="3",minor="10",patchlevel="15",version="3.10.15"} 1.0 +# HELP hatchet_worker_listener_health_my_worker Listener health (1 healthy, 0 unhealthy) +# TYPE hatchet_worker_listener_health_my_worker gauge +hatchet_worker_listener_health_my_worker 1.0 +# HELP hatchet_worker_event_loop_lag_seconds_my_worker Event loop lag in seconds (listener process) +# TYPE hatchet_worker_event_loop_lag_seconds_my_worker gauge +hatchet_worker_event_loop_lag_seconds_my_worker 
0.0 +``` + +#### Example Prometheus Configuration for `/metrics`: + +```yaml +scrape_configs: + - job_name: "hatchet" + scrape_interval: 5s + static_configs: + - targets: ["localhost:8001"] +``` + +#### Example Prometheus Query + +An example query to check if the worker is healthy might look something like: + +``` +(hatchet_worker_listener_health_my_worker{instance="localhost:8001", job="hatchet"}) or vector(0) +``` diff --git a/frontend/docs/pages/v1/workers.mdx b/frontend/docs/pages/v1/workers.mdx new file mode 100644 index 0000000000..7e6dfd10a7 --- /dev/null +++ b/frontend/docs/pages/v1/workers.mdx @@ -0,0 +1,187 @@ +import { snippets } from "@/lib/generated/snippets"; +import { Snippet } from "@/components/code"; +import { Callout, Tabs } from "nextra/components"; +import UniversalTabs from "@/components/UniversalTabs"; + +# Workers + +Workers are the processes that actually execute your [tasks](/v1/tasks). Each worker is a long-running process in your infrastructure that maintains a persistent gRPC connection to the Hatchet engine. Workers receive task assignments, run your code, and report results back. You can run them locally during development, in containers, or on VMs - and scale them independently from the rest of your stack. + +## Declaring a worker + +A worker needs a name and a set of tasks to handle. Call the `worker` method on the Hatchet client with both. + + + + + + + + + + + + + + + + +When a worker starts, it registers each of its tasks with the Hatchet engine. From that point on, Hatchet knows to route matching tasks to that worker. Multiple workers can register the same task - Hatchet distributes work across all of them. + +## Starting a worker + + + + +The fastest way to run a worker during development is with the Hatchet CLI. This handles authentication and hot-reloads your worker when code changes: + +```bash +hatchet worker dev +``` + + + + +You can also run the worker script directly. 
This requires a `HATCHET_CLIENT_TOKEN` environment variable. You can generate an API token from the Hatchet dashboard by navigating to the **Settings** tab and clicking **API Tokens**. Click **Generate API Token** to create a new token, and do not share it publicly. + +```bash +export HATCHET_CLIENT_TOKEN="" +``` + +If you are a self-hosted user without TLS enabled, also set: + +```bash +export HATCHET_CLIENT_TLS_STRATEGY=none +``` + +Then run your worker: + + + +```bash +python worker.py +``` + + + +Add a script to your `package.json`: + +```json +"scripts": { + "start:worker": "ts-node src/worker.ts" +} +``` + +Then run it: + +```bash +npm run start:worker +``` + + + +```bash +go run main.go +``` + + +```bash +bundle exec ruby worker.rb +``` + + + + + + +Once the worker starts, you will see logs confirming it is connected: + +``` +[INFO] 🪓 -- STARTING HATCHET... +[DEBUG] 🪓 -- 'test-worker' waiting for ['simpletask:step1'] +[DEBUG] 🪓 -- acquired action listener: efc4aaf2-... +[DEBUG] 🪓 -- sending heartbeat +``` + + + For self-hosted users, you may need to set additional gRPC configuration + options. See the [Self-Hosting](/self-hosting/worker-configuration-options) + docs for details. + + +## Worker lifecycle + +A worker moves through four phases during its lifetime: + +```mermaid +graph LR + Created -->|Connects to engine| ACTIVE + ACTIVE -->|Heartbeat timeout| INACTIVE + ACTIVE -->|Graceful shutdown| STOPPED + INACTIVE -->|Reconnects| ACTIVE +``` + +- **ACTIVE** - the worker is connected and accepting tasks. +- **INACTIVE** - the engine has not received a heartbeat within the expected window. Tasks assigned to this worker will be reassigned. +- **STOPPED** - the worker shut down gracefully. In-flight tasks are allowed to complete before the process exits. + +Hatchet uses heartbeats to monitor worker health. Workers send a heartbeat every **4 seconds**. 
If the engine does not receive a heartbeat for **30 seconds**, the worker is marked INACTIVE and its in-flight tasks are re-queued for other workers to pick up. + +Common reasons a worker misses heartbeats: + +- **Process crash** - the worker process exits unexpectedly (OOM kill, unhandled exception, SIGKILL). +- **Network disruption** - the connection between the worker and the Hatchet engine is interrupted (DNS failure, firewall change, cloud network blip). +- **Blocked main thread** - a long-running synchronous computation (e.g. CPU-intensive work, a blocking FFI call) starves the heartbeat loop and prevents it from sending on time. + +## Slots + +Every worker has a fixed number of **slots** that control how many tasks it can run concurrently. You configure them with the `slots` option on the worker. If you set `slots=5`, the worker will run up to five tasks at the same time. Any additional tasks wait in the queue until a slot opens up. + +Slots are a **local** limit - they protect the individual worker process from overcommitting its CPU, memory, or event loop. [Concurrency controls](/v1/concurrency) are a **global** limit across your entire fleet - use them to prevent a single tenant or use-case from monopolizing capacity, or to respect the limits of an external resource like a third-party API or database connection pool. The two work together: concurrency controls decide how many runs Hatchet will allow to be active; slots decide how many of those runs each individual worker is willing to accept. + +### Choosing a slot count + +Start with a slot count that matches the degree of parallelism your worker can sustain. For CPU-heavy tasks, that is typically the number of available cores. For I/O-heavy tasks (HTTP calls, database queries), you can safely go higher because most of the time is spent waiting. + + + Adding slots is only helpful up to the point where the worker is not + bottlenecked by another resource. 
If your worker is CPU-bound, memory-bound, + or waiting on network I/O, more slots will just increase contention. Monitor + memory usage and event loop lag after changing slot counts - if either climbs, + you have gone too far. + + +## Scaling workers + +You can increase throughput in two ways: add more slots to a single worker, or run more worker processes. In most workloads, horizontal scaling (more workers) is the simplest path because each worker brings its own pool of slots and its own resources. + +When running in Kubernetes or a similar orchestrator, you can autoscale workers based on queue depth using the [Task Stats API](/v1/autoscaling-workers). Hatchet also supports [KEDA integration](/v1/autoscaling-workers#autoscaling-with-keda) for event-driven autoscaling. + +## Task assignment + +By default, Hatchet distributes tasks to any available worker that has registered the task. You can influence this behavior in several ways: + +| Concept | What it does | +| ------------------------------------------------------------------ | --------------------------------------------------------------- | +| [Worker Affinity](/v1/advanced-assignment/worker-affinity) | Prefer or require specific workers based on labels and weights. | +| [Sticky Assignment](/v1/advanced-assignment/sticky-assignment) | Pin related tasks in a workflow to the same worker. | +| [Manual Slot Release](/v1/advanced-assignment/manual-slot-release) | Free a worker slot before the task function returns. | + +These are useful when a worker has specialized hardware (a GPU, a loaded ML model), or when co-locating related tasks on the same worker avoids redundant setup. + +## Running in production + +In development, the fastest way to run a worker is `hatchet worker dev`, which handles authentication and hot-reloads your code on changes. In production, you'll run workers as standalone processes or containers. 
+ +| Concept | What it does | +| ----------------------------------------------- | ----------------------------------------------------------------------- | +| [Running with Docker](/v1/docker) | Containerize workers for deployment. | +| [Autoscaling Workers](/v1/autoscaling-workers) | Scale workers dynamically based on queue depth. | +| [Worker Health Checks](/v1/worker-healthchecks) | Expose `/health` and `/metrics` endpoints for monitoring. | +| [Preparing for Production](/v1/production) | Operational best practices for monitoring, error handling, and scaling. | + +## Workers and tasks + +Workers and tasks have a many-to-many relationship. A single worker can register many tasks, and a single task can be registered on many workers. This means you can organize your workers by resource requirements, deployment boundary, or any other criterion - and Hatchet handles routing tasks to the right place. + +If you haven't already, read about [tasks](/v1/tasks) to understand how work is defined and configured. 
diff --git a/frontend/docs/pnpm-lock.yaml b/frontend/docs/pnpm-lock.yaml index 81dfe4c631..daf3798d92 100644 --- a/frontend/docs/pnpm-lock.yaml +++ b/frontend/docs/pnpm-lock.yaml @@ -19,12 +19,21 @@ importers: .: dependencies: + '@radix-ui/react-dialog': + specifier: ^1.1.15 + version: 1.1.15(@types/react-dom@18.3.7(@types/react@18.3.26))(@types/react@18.3.26)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) '@radix-ui/react-icons': specifier: ^1.3.2 version: 1.3.2(react@18.3.1) + '@radix-ui/react-select': + specifier: ^2.1.6 + version: 2.2.6(@types/react-dom@18.3.7(@types/react@18.3.26))(@types/react@18.3.26)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) '@radix-ui/react-slot': specifier: ^1.2.3 version: 1.2.3(@types/react@18.3.26)(react@18.3.1) + '@radix-ui/react-tooltip': + specifier: ^1.2.8 + version: 1.2.8(@types/react-dom@18.3.7(@types/react@18.3.26))(@types/react@18.3.26)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) '@types/js-yaml': specifier: ^4.0.9 version: 4.0.9 @@ -572,6 +581,38 @@ packages: '@posthog/core@1.22.0': resolution: {integrity: sha512-WkmOnq95aAOu6yk6r5LWr5cfXsQdpVbWDCwOxQwxSne8YV6GuZET1ziO5toSQXgrgbdcjrSz2/GopAfiL6iiAA==} + '@radix-ui/number@1.1.1': + resolution: {integrity: sha512-MkKCwxlXTgz6CFoJx3pCwn07GKp36+aZyu/u2Ln2VrA5DcdyCZkASEDBTd8x5whTQQL5CiYf4prXKLcgQdv29g==} + + '@radix-ui/primitive@1.1.3': + resolution: {integrity: sha512-JTF99U/6XIjCBo0wqkU5sK10glYe27MRRsfwoiq5zzOEZLHU3A3KCMa5X/azekYRCJ0HlwI0crAXS/5dEHTzDg==} + + '@radix-ui/react-arrow@1.1.7': + resolution: {integrity: sha512-F+M1tLhO+mlQaOWspE8Wstg+z6PwxwRd8oQ8IXceWz92kfAmalTRf0EjrouQeo7QssEPfCn05B4Ihs1K9WQ/7w==} + peerDependencies: + '@types/react': '*' + '@types/react-dom': '*' + react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + react-dom: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + '@types/react-dom': + optional: true + + '@radix-ui/react-collection@1.1.7': + resolution: {integrity: 
sha512-Fh9rGN0MoI4ZFUNyfFVNU4y9LUz93u9/0K+yLgA2bwRojxM8JU1DyvvMBabnZPBgMWREAJvU2jjVzq+LrFUglw==} + peerDependencies: + '@types/react': '*' + '@types/react-dom': '*' + react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + react-dom: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + '@types/react-dom': + optional: true + '@radix-ui/react-compose-refs@1.1.2': resolution: {integrity: sha512-z4eqJvfiNnFMHIIvXP3CY57y2WJs5g2v3X0zm9mEJkrkNv4rDxu+sg9Jh8EkXyeqBkB7SOcboo9dMVqhyrACIg==} peerDependencies: @@ -581,11 +622,151 @@ packages: '@types/react': optional: true + '@radix-ui/react-context@1.1.2': + resolution: {integrity: sha512-jCi/QKUM2r1Ju5a3J64TH2A5SpKAgh0LpknyqdQ4m6DCV0xJ2HG1xARRwNGPQfi1SLdLWZ1OJz6F4OMBBNiGJA==} + peerDependencies: + '@types/react': '*' + react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + + '@radix-ui/react-dialog@1.1.15': + resolution: {integrity: sha512-TCglVRtzlffRNxRMEyR36DGBLJpeusFcgMVD9PZEzAKnUs1lKCgX5u9BmC2Yg+LL9MgZDugFFs1Vl+Jp4t/PGw==} + peerDependencies: + '@types/react': '*' + '@types/react-dom': '*' + react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + react-dom: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + '@types/react-dom': + optional: true + + '@radix-ui/react-direction@1.1.1': + resolution: {integrity: sha512-1UEWRX6jnOA2y4H5WczZ44gOOjTEmlqv1uNW4GAJEO5+bauCBhv8snY65Iw5/VOS/ghKN9gr2KjnLKxrsvoMVw==} + peerDependencies: + '@types/react': '*' + react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + + '@radix-ui/react-dismissable-layer@1.1.11': + resolution: {integrity: sha512-Nqcp+t5cTB8BinFkZgXiMJniQH0PsUt2k51FUhbdfeKvc4ACcG2uQniY/8+h1Yv6Kza4Q7lD7PQV0z0oicE0Mg==} + peerDependencies: + '@types/react': '*' + '@types/react-dom': '*' + react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + 
react-dom: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + '@types/react-dom': + optional: true + + '@radix-ui/react-focus-guards@1.1.3': + resolution: {integrity: sha512-0rFg/Rj2Q62NCm62jZw0QX7a3sz6QCQU0LpZdNrJX8byRGaGVTqbrW9jAoIAHyMQqsNpeZ81YgSizOt5WXq0Pw==} + peerDependencies: + '@types/react': '*' + react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + + '@radix-ui/react-focus-scope@1.1.7': + resolution: {integrity: sha512-t2ODlkXBQyn7jkl6TNaw/MtVEVvIGelJDCG41Okq/KwUsJBwQ4XVZsHAVUkK4mBv3ewiAS3PGuUWuY2BoK4ZUw==} + peerDependencies: + '@types/react': '*' + '@types/react-dom': '*' + react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + react-dom: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + '@types/react-dom': + optional: true + '@radix-ui/react-icons@1.3.2': resolution: {integrity: sha512-fyQIhGDhzfc9pK2kH6Pl9c4BDJGfMkPqkyIgYDthyNYoNg3wVhoJMMh19WS4Up/1KMPFVpNsT2q3WmXn2N1m6g==} peerDependencies: react: ^16.x || ^17.x || ^18.x || ^19.0.0 || ^19.0.0-rc + '@radix-ui/react-id@1.1.1': + resolution: {integrity: sha512-kGkGegYIdQsOb4XjsfM97rXsiHaBwco+hFI66oO4s9LU+PLAC5oJ7khdOVFxkhsmlbpUqDAvXw11CluXP+jkHg==} + peerDependencies: + '@types/react': '*' + react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + + '@radix-ui/react-popper@1.2.8': + resolution: {integrity: sha512-0NJQ4LFFUuWkE7Oxf0htBKS6zLkkjBH+hM1uk7Ng705ReR8m/uelduy1DBo0PyBXPKVnBA6YBlU94MBGXrSBCw==} + peerDependencies: + '@types/react': '*' + '@types/react-dom': '*' + react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + react-dom: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + '@types/react-dom': + optional: true + + '@radix-ui/react-portal@1.1.9': + resolution: {integrity: 
sha512-bpIxvq03if6UNwXZ+HTK71JLh4APvnXntDc6XOX8UVq4XQOVl7lwok0AvIl+b8zgCw3fSaVTZMpAPPagXbKmHQ==} + peerDependencies: + '@types/react': '*' + '@types/react-dom': '*' + react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + react-dom: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + '@types/react-dom': + optional: true + + '@radix-ui/react-presence@1.1.5': + resolution: {integrity: sha512-/jfEwNDdQVBCNvjkGit4h6pMOzq8bHkopq458dPt2lMjx+eBQUohZNG9A7DtO/O5ukSbxuaNGXMjHicgwy6rQQ==} + peerDependencies: + '@types/react': '*' + '@types/react-dom': '*' + react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + react-dom: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + '@types/react-dom': + optional: true + + '@radix-ui/react-primitive@2.1.3': + resolution: {integrity: sha512-m9gTwRkhy2lvCPe6QJp4d3G1TYEUHn/FzJUtq9MjH46an1wJU+GdoGC5VLof8RX8Ft/DlpshApkhswDLZzHIcQ==} + peerDependencies: + '@types/react': '*' + '@types/react-dom': '*' + react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + react-dom: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + '@types/react-dom': + optional: true + + '@radix-ui/react-select@2.2.6': + resolution: {integrity: sha512-I30RydO+bnn2PQztvo25tswPH+wFBjehVGtmagkU78yMdwTwVf12wnAOF+AeP8S2N8xD+5UPbGhkUfPyvT+mwQ==} + peerDependencies: + '@types/react': '*' + '@types/react-dom': '*' + react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + react-dom: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + '@types/react-dom': + optional: true + '@radix-ui/react-slot@1.2.3': resolution: {integrity: sha512-aeNmHnBxbi2St0au6VBVC7JXFlhLlOnvIIlePNniyUNAClzmtAUEY8/pBiK3iHjufOlwA+c20/8jngo7xcrg8A==} peerDependencies: @@ -595,6 +776,107 @@ packages: '@types/react': optional: true + '@radix-ui/react-tooltip@1.2.8': + resolution: 
{integrity: sha512-tY7sVt1yL9ozIxvmbtN5qtmH2krXcBCfjEiCgKGLqunJHvgvZG2Pcl2oQ3kbcZARb1BGEHdkLzcYGO8ynVlieg==} + peerDependencies: + '@types/react': '*' + '@types/react-dom': '*' + react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + react-dom: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + '@types/react-dom': + optional: true + + '@radix-ui/react-use-callback-ref@1.1.1': + resolution: {integrity: sha512-FkBMwD+qbGQeMu1cOHnuGB6x4yzPjho8ap5WtbEJ26umhgqVXbhekKUQO+hZEL1vU92a3wHwdp0HAcqAUF5iDg==} + peerDependencies: + '@types/react': '*' + react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + + '@radix-ui/react-use-controllable-state@1.2.2': + resolution: {integrity: sha512-BjasUjixPFdS+NKkypcyyN5Pmg83Olst0+c6vGov0diwTEo6mgdqVR6hxcEgFuh4QrAs7Rc+9KuGJ9TVCj0Zzg==} + peerDependencies: + '@types/react': '*' + react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + + '@radix-ui/react-use-effect-event@0.0.2': + resolution: {integrity: sha512-Qp8WbZOBe+blgpuUT+lw2xheLP8q0oatc9UpmiemEICxGvFLYmHm9QowVZGHtJlGbS6A6yJ3iViad/2cVjnOiA==} + peerDependencies: + '@types/react': '*' + react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + + '@radix-ui/react-use-escape-keydown@1.1.1': + resolution: {integrity: sha512-Il0+boE7w/XebUHyBjroE+DbByORGR9KKmITzbR7MyQ4akpORYP/ZmbhAr0DG7RmmBqoOnZdy2QlvajJ2QA59g==} + peerDependencies: + '@types/react': '*' + react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + + '@radix-ui/react-use-layout-effect@1.1.1': + resolution: {integrity: sha512-RbJRS4UWQFkzHTTwVymMTUv8EqYhOp8dOOviLj2ugtTiXRaRQS7GLGxZTLL1jWhMeoSCf5zmcZkqTl9IiYfXcQ==} + peerDependencies: + '@types/react': '*' + react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + peerDependenciesMeta: + 
'@types/react': + optional: true + + '@radix-ui/react-use-previous@1.1.1': + resolution: {integrity: sha512-2dHfToCj/pzca2Ck724OZ5L0EVrr3eHRNsG/b3xQJLA2hZpVCS99bLAX+hm1IHXDEnzU6by5z/5MIY794/a8NQ==} + peerDependencies: + '@types/react': '*' + react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + + '@radix-ui/react-use-rect@1.1.1': + resolution: {integrity: sha512-QTYuDesS0VtuHNNvMh+CjlKJ4LJickCMUAqjlE3+j8w+RlRpwyX3apEQKGFzbZGdo7XNG1tXa+bQqIE7HIXT2w==} + peerDependencies: + '@types/react': '*' + react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + + '@radix-ui/react-use-size@1.1.1': + resolution: {integrity: sha512-ewrXRDTAqAXlkl6t/fkXWNAhFX9I+CkKlw6zjEwk86RSPKwZr3xpBRso655aqYafwtnbpHLj6toFzmd6xdVptQ==} + peerDependencies: + '@types/react': '*' + react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + + '@radix-ui/react-visually-hidden@1.2.3': + resolution: {integrity: sha512-pzJq12tEaaIhqjbzpCuv/OypJY/BPavOofm+dbab+MHLajy277+1lLm6JFcGgF5eskJ6mquGirhXY2GD/8u8Ug==} + peerDependencies: + '@types/react': '*' + '@types/react-dom': '*' + react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + react-dom: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + '@types/react-dom': + optional: true + + '@radix-ui/rect@1.1.1': + resolution: {integrity: sha512-HPwpGIzkl28mWyZqG52jiqDJ12waP11Pa1lGoiyUkIEuMLBP0oeK/C89esbXrxsky5we7dfd8U58nm0SgAWpVw==} + '@react-aria/focus@3.21.2': resolution: {integrity: sha512-JWaCR7wJVggj+ldmM/cb/DXFg47CXR55lznJhZBh4XVqJjMKwaOOqpT5vNN7kpC1wUpXicGNuDnJDN1S/+6dhQ==} peerDependencies: @@ -1009,6 +1291,10 @@ packages: argparse@2.0.1: resolution: {integrity: sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==} + aria-hidden@1.2.6: + resolution: {integrity: 
sha512-ik3ZgC9dY/lYVVM++OISsaYDeg1tb0VtP5uL3ouh1koGOaUMDPpbFIei4JkFimWUFPn90sbMNMXQAIVOlnYKJA==} + engines: {node: '>=10'} + array-iterate@2.0.1: resolution: {integrity: sha512-I1jXZMjAgCMmxT4qxXfPXa6SthSoE8h6gkSI9BGGNv8mP8G/v0blc+qFnZu6K42vTOiuME596QaLO0TP3Lk0xg==} @@ -1425,6 +1711,9 @@ packages: resolution: {integrity: sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA==} engines: {node: '>=6'} + detect-node-es@1.1.0: + resolution: {integrity: sha512-ypdmJU/TbBby2Dxibuv7ZLW3Bs1QEmM7nHjEANfohJLvE0XVujisn1qPJcZxg+qDucsr+bP6fLD1rPS3AhJ7EQ==} + devlop@1.1.0: resolution: {integrity: sha512-RWmIqhcFf1lRYBvNmr7qTNuyCt/7/ns2jbpp1+PalgE/rDQcBT0fioSMUpJ93irlUhC5hrg4cYqe6U+0ImW0rA==} @@ -1631,6 +1920,10 @@ packages: resolution: {integrity: sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==} engines: {node: '>= 0.4'} + get-nonce@1.0.1: + resolution: {integrity: sha512-FJhYRoDaiatfEkUK8HKlicmu/3SGFD51q3itKDGoSTysQJBnfOcxU5GxnhE1E6soB76MbT0MBtnKJuXyAx+96Q==} + engines: {node: '>=6'} + get-proto@1.0.1: resolution: {integrity: sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==} engines: {node: '>= 0.4'} @@ -2593,12 +2886,42 @@ packages: redux: optional: true + react-remove-scroll-bar@2.3.8: + resolution: {integrity: sha512-9r+yi9+mgU33AKcj6IbT9oRCO78WriSj6t/cF8DWBZJ9aOGPOTEDvdUDz1FwKim7QXWwmHqtdHnRJfhAxEG46Q==} + engines: {node: '>=10'} + peerDependencies: + '@types/react': '*' + react: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 + peerDependenciesMeta: + '@types/react': + optional: true + + react-remove-scroll@2.7.2: + resolution: {integrity: sha512-Iqb9NjCCTt6Hf+vOdNIZGdTiH1QSqr27H/Ek9sv/a97gfueI/5h1s3yRi1nngzMUaOOToin5dI1dXKdXiF+u0Q==} + engines: {node: '>=10'} + peerDependencies: + '@types/react': '*' + react: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + react-smooth@4.0.4: 
resolution: {integrity: sha512-gnGKTpYwqL0Iii09gHobNolvX4Kiq4PKx6eWBCYYix+8cdw+cGo3do906l1NBPKkSWx1DghC1dlWG9L2uGd61Q==} peerDependencies: react: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 + react-style-singleton@2.2.3: + resolution: {integrity: sha512-b6jSvxvVnyptAiLjbkWLE/lOnR4lfTtDAl+eUC7RZy+QQWc6wRzIV2CE6xBuMmDxc2qIihtDCZD5NPOFl7fRBQ==} + engines: {node: '>=10'} + peerDependencies: + '@types/react': '*' + react: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + react-syntax-highlighter@15.6.6: resolution: {integrity: sha512-DgXrc+AZF47+HvAPEmn7Ua/1p10jNoVZVI/LoPiYdtY+OM+/nG5yefLHKJwdKqY1adMuHFbeyBaG9j64ML7vTw==} peerDependencies: @@ -3100,6 +3423,26 @@ packages: url-parse@1.5.10: resolution: {integrity: sha512-WypcfiRhfeUP9vvF0j6rw0J3hrWrw6iZv3+22h6iRMJ/8z1Tj6XfLP4DsUix5MhMPnXpiHDoKyoZ/bdCkwBCiQ==} + use-callback-ref@1.3.3: + resolution: {integrity: sha512-jQL3lRnocaFtu3V00JToYz/4QkNWswxijDaCVNZRiRTO3HQDLsdu1ZtmIUvV4yPp+rvWm5j0y0TG/S61cuijTg==} + engines: {node: '>=10'} + peerDependencies: + '@types/react': '*' + react: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + + use-sidecar@1.1.3: + resolution: {integrity: sha512-Fedw0aZvkhynoPYlA5WXrMCAMm+nSWdZt6lzJQ7Ok8S6Q+VsHmHpRWndVRJ8Be0ZbkfPc5LRYH+5XrzXcEeLRQ==} + engines: {node: '>=10'} + peerDependencies: + '@types/react': '*' + react: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + use-sync-external-store@1.6.0: resolution: {integrity: sha512-Pp6GSwGP/NrPIrxVFAIkOQeyw8lFenOHijQWkUTrDvrF4ALqylP2C/KCkeS9dpUM3KvYRQhna5vt7IL95+ZQ9w==} peerDependencies: @@ -3558,16 +3901,188 @@ snapshots: dependencies: cross-spawn: 7.0.6 + '@radix-ui/number@1.1.1': {} + + '@radix-ui/primitive@1.1.3': {} + + 
'@radix-ui/react-arrow@1.1.7(@types/react-dom@18.3.7(@types/react@18.3.26))(@types/react@18.3.26)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + dependencies: + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@18.3.7(@types/react@18.3.26))(@types/react@18.3.26)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + react: 18.3.1 + react-dom: 18.3.1(react@18.3.1) + optionalDependencies: + '@types/react': 18.3.26 + '@types/react-dom': 18.3.7(@types/react@18.3.26) + + '@radix-ui/react-collection@1.1.7(@types/react-dom@18.3.7(@types/react@18.3.26))(@types/react@18.3.26)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + dependencies: + '@radix-ui/react-compose-refs': 1.1.2(@types/react@18.3.26)(react@18.3.1) + '@radix-ui/react-context': 1.1.2(@types/react@18.3.26)(react@18.3.1) + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@18.3.7(@types/react@18.3.26))(@types/react@18.3.26)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@radix-ui/react-slot': 1.2.3(@types/react@18.3.26)(react@18.3.1) + react: 18.3.1 + react-dom: 18.3.1(react@18.3.1) + optionalDependencies: + '@types/react': 18.3.26 + '@types/react-dom': 18.3.7(@types/react@18.3.26) + '@radix-ui/react-compose-refs@1.1.2(@types/react@18.3.26)(react@18.3.1)': dependencies: react: 18.3.1 optionalDependencies: '@types/react': 18.3.26 + '@radix-ui/react-context@1.1.2(@types/react@18.3.26)(react@18.3.1)': + dependencies: + react: 18.3.1 + optionalDependencies: + '@types/react': 18.3.26 + + '@radix-ui/react-dialog@1.1.15(@types/react-dom@18.3.7(@types/react@18.3.26))(@types/react@18.3.26)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + dependencies: + '@radix-ui/primitive': 1.1.3 + '@radix-ui/react-compose-refs': 1.1.2(@types/react@18.3.26)(react@18.3.1) + '@radix-ui/react-context': 1.1.2(@types/react@18.3.26)(react@18.3.1) + '@radix-ui/react-dismissable-layer': 1.1.11(@types/react-dom@18.3.7(@types/react@18.3.26))(@types/react@18.3.26)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@radix-ui/react-focus-guards': 
1.1.3(@types/react@18.3.26)(react@18.3.1) + '@radix-ui/react-focus-scope': 1.1.7(@types/react-dom@18.3.7(@types/react@18.3.26))(@types/react@18.3.26)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@radix-ui/react-id': 1.1.1(@types/react@18.3.26)(react@18.3.1) + '@radix-ui/react-portal': 1.1.9(@types/react-dom@18.3.7(@types/react@18.3.26))(@types/react@18.3.26)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@radix-ui/react-presence': 1.1.5(@types/react-dom@18.3.7(@types/react@18.3.26))(@types/react@18.3.26)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@18.3.7(@types/react@18.3.26))(@types/react@18.3.26)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@radix-ui/react-slot': 1.2.3(@types/react@18.3.26)(react@18.3.1) + '@radix-ui/react-use-controllable-state': 1.2.2(@types/react@18.3.26)(react@18.3.1) + aria-hidden: 1.2.6 + react: 18.3.1 + react-dom: 18.3.1(react@18.3.1) + react-remove-scroll: 2.7.2(@types/react@18.3.26)(react@18.3.1) + optionalDependencies: + '@types/react': 18.3.26 + '@types/react-dom': 18.3.7(@types/react@18.3.26) + + '@radix-ui/react-direction@1.1.1(@types/react@18.3.26)(react@18.3.1)': + dependencies: + react: 18.3.1 + optionalDependencies: + '@types/react': 18.3.26 + + '@radix-ui/react-dismissable-layer@1.1.11(@types/react-dom@18.3.7(@types/react@18.3.26))(@types/react@18.3.26)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + dependencies: + '@radix-ui/primitive': 1.1.3 + '@radix-ui/react-compose-refs': 1.1.2(@types/react@18.3.26)(react@18.3.1) + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@18.3.7(@types/react@18.3.26))(@types/react@18.3.26)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@radix-ui/react-use-callback-ref': 1.1.1(@types/react@18.3.26)(react@18.3.1) + '@radix-ui/react-use-escape-keydown': 1.1.1(@types/react@18.3.26)(react@18.3.1) + react: 18.3.1 + react-dom: 18.3.1(react@18.3.1) + optionalDependencies: + '@types/react': 18.3.26 + '@types/react-dom': 
18.3.7(@types/react@18.3.26) + + '@radix-ui/react-focus-guards@1.1.3(@types/react@18.3.26)(react@18.3.1)': + dependencies: + react: 18.3.1 + optionalDependencies: + '@types/react': 18.3.26 + + '@radix-ui/react-focus-scope@1.1.7(@types/react-dom@18.3.7(@types/react@18.3.26))(@types/react@18.3.26)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + dependencies: + '@radix-ui/react-compose-refs': 1.1.2(@types/react@18.3.26)(react@18.3.1) + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@18.3.7(@types/react@18.3.26))(@types/react@18.3.26)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@radix-ui/react-use-callback-ref': 1.1.1(@types/react@18.3.26)(react@18.3.1) + react: 18.3.1 + react-dom: 18.3.1(react@18.3.1) + optionalDependencies: + '@types/react': 18.3.26 + '@types/react-dom': 18.3.7(@types/react@18.3.26) + '@radix-ui/react-icons@1.3.2(react@18.3.1)': dependencies: react: 18.3.1 + '@radix-ui/react-id@1.1.1(@types/react@18.3.26)(react@18.3.1)': + dependencies: + '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@18.3.26)(react@18.3.1) + react: 18.3.1 + optionalDependencies: + '@types/react': 18.3.26 + + '@radix-ui/react-popper@1.2.8(@types/react-dom@18.3.7(@types/react@18.3.26))(@types/react@18.3.26)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + dependencies: + '@floating-ui/react-dom': 2.1.6(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@radix-ui/react-arrow': 1.1.7(@types/react-dom@18.3.7(@types/react@18.3.26))(@types/react@18.3.26)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@radix-ui/react-compose-refs': 1.1.2(@types/react@18.3.26)(react@18.3.1) + '@radix-ui/react-context': 1.1.2(@types/react@18.3.26)(react@18.3.1) + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@18.3.7(@types/react@18.3.26))(@types/react@18.3.26)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@radix-ui/react-use-callback-ref': 1.1.1(@types/react@18.3.26)(react@18.3.1) + '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@18.3.26)(react@18.3.1) + 
'@radix-ui/react-use-rect': 1.1.1(@types/react@18.3.26)(react@18.3.1) + '@radix-ui/react-use-size': 1.1.1(@types/react@18.3.26)(react@18.3.1) + '@radix-ui/rect': 1.1.1 + react: 18.3.1 + react-dom: 18.3.1(react@18.3.1) + optionalDependencies: + '@types/react': 18.3.26 + '@types/react-dom': 18.3.7(@types/react@18.3.26) + + '@radix-ui/react-portal@1.1.9(@types/react-dom@18.3.7(@types/react@18.3.26))(@types/react@18.3.26)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + dependencies: + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@18.3.7(@types/react@18.3.26))(@types/react@18.3.26)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@18.3.26)(react@18.3.1) + react: 18.3.1 + react-dom: 18.3.1(react@18.3.1) + optionalDependencies: + '@types/react': 18.3.26 + '@types/react-dom': 18.3.7(@types/react@18.3.26) + + '@radix-ui/react-presence@1.1.5(@types/react-dom@18.3.7(@types/react@18.3.26))(@types/react@18.3.26)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + dependencies: + '@radix-ui/react-compose-refs': 1.1.2(@types/react@18.3.26)(react@18.3.1) + '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@18.3.26)(react@18.3.1) + react: 18.3.1 + react-dom: 18.3.1(react@18.3.1) + optionalDependencies: + '@types/react': 18.3.26 + '@types/react-dom': 18.3.7(@types/react@18.3.26) + + '@radix-ui/react-primitive@2.1.3(@types/react-dom@18.3.7(@types/react@18.3.26))(@types/react@18.3.26)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + dependencies: + '@radix-ui/react-slot': 1.2.3(@types/react@18.3.26)(react@18.3.1) + react: 18.3.1 + react-dom: 18.3.1(react@18.3.1) + optionalDependencies: + '@types/react': 18.3.26 + '@types/react-dom': 18.3.7(@types/react@18.3.26) + + '@radix-ui/react-select@2.2.6(@types/react-dom@18.3.7(@types/react@18.3.26))(@types/react@18.3.26)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + dependencies: + '@radix-ui/number': 1.1.1 + '@radix-ui/primitive': 1.1.3 + '@radix-ui/react-collection': 
1.1.7(@types/react-dom@18.3.7(@types/react@18.3.26))(@types/react@18.3.26)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@radix-ui/react-compose-refs': 1.1.2(@types/react@18.3.26)(react@18.3.1) + '@radix-ui/react-context': 1.1.2(@types/react@18.3.26)(react@18.3.1) + '@radix-ui/react-direction': 1.1.1(@types/react@18.3.26)(react@18.3.1) + '@radix-ui/react-dismissable-layer': 1.1.11(@types/react-dom@18.3.7(@types/react@18.3.26))(@types/react@18.3.26)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@radix-ui/react-focus-guards': 1.1.3(@types/react@18.3.26)(react@18.3.1) + '@radix-ui/react-focus-scope': 1.1.7(@types/react-dom@18.3.7(@types/react@18.3.26))(@types/react@18.3.26)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@radix-ui/react-id': 1.1.1(@types/react@18.3.26)(react@18.3.1) + '@radix-ui/react-popper': 1.2.8(@types/react-dom@18.3.7(@types/react@18.3.26))(@types/react@18.3.26)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@radix-ui/react-portal': 1.1.9(@types/react-dom@18.3.7(@types/react@18.3.26))(@types/react@18.3.26)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@18.3.7(@types/react@18.3.26))(@types/react@18.3.26)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@radix-ui/react-slot': 1.2.3(@types/react@18.3.26)(react@18.3.1) + '@radix-ui/react-use-callback-ref': 1.1.1(@types/react@18.3.26)(react@18.3.1) + '@radix-ui/react-use-controllable-state': 1.2.2(@types/react@18.3.26)(react@18.3.1) + '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@18.3.26)(react@18.3.1) + '@radix-ui/react-use-previous': 1.1.1(@types/react@18.3.26)(react@18.3.1) + '@radix-ui/react-visually-hidden': 1.2.3(@types/react-dom@18.3.7(@types/react@18.3.26))(@types/react@18.3.26)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + aria-hidden: 1.2.6 + react: 18.3.1 + react-dom: 18.3.1(react@18.3.1) + react-remove-scroll: 2.7.2(@types/react@18.3.26)(react@18.3.1) + optionalDependencies: + '@types/react': 18.3.26 + 
'@types/react-dom': 18.3.7(@types/react@18.3.26) + '@radix-ui/react-slot@1.2.3(@types/react@18.3.26)(react@18.3.1)': dependencies: '@radix-ui/react-compose-refs': 1.1.2(@types/react@18.3.26)(react@18.3.1) @@ -3575,6 +4090,91 @@ snapshots: optionalDependencies: '@types/react': 18.3.26 + '@radix-ui/react-tooltip@1.2.8(@types/react-dom@18.3.7(@types/react@18.3.26))(@types/react@18.3.26)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + dependencies: + '@radix-ui/primitive': 1.1.3 + '@radix-ui/react-compose-refs': 1.1.2(@types/react@18.3.26)(react@18.3.1) + '@radix-ui/react-context': 1.1.2(@types/react@18.3.26)(react@18.3.1) + '@radix-ui/react-dismissable-layer': 1.1.11(@types/react-dom@18.3.7(@types/react@18.3.26))(@types/react@18.3.26)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@radix-ui/react-id': 1.1.1(@types/react@18.3.26)(react@18.3.1) + '@radix-ui/react-popper': 1.2.8(@types/react-dom@18.3.7(@types/react@18.3.26))(@types/react@18.3.26)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@radix-ui/react-portal': 1.1.9(@types/react-dom@18.3.7(@types/react@18.3.26))(@types/react@18.3.26)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@radix-ui/react-presence': 1.1.5(@types/react-dom@18.3.7(@types/react@18.3.26))(@types/react@18.3.26)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@18.3.7(@types/react@18.3.26))(@types/react@18.3.26)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@radix-ui/react-slot': 1.2.3(@types/react@18.3.26)(react@18.3.1) + '@radix-ui/react-use-controllable-state': 1.2.2(@types/react@18.3.26)(react@18.3.1) + '@radix-ui/react-visually-hidden': 1.2.3(@types/react-dom@18.3.7(@types/react@18.3.26))(@types/react@18.3.26)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + react: 18.3.1 + react-dom: 18.3.1(react@18.3.1) + optionalDependencies: + '@types/react': 18.3.26 + '@types/react-dom': 18.3.7(@types/react@18.3.26) + + 
'@radix-ui/react-use-callback-ref@1.1.1(@types/react@18.3.26)(react@18.3.1)': + dependencies: + react: 18.3.1 + optionalDependencies: + '@types/react': 18.3.26 + + '@radix-ui/react-use-controllable-state@1.2.2(@types/react@18.3.26)(react@18.3.1)': + dependencies: + '@radix-ui/react-use-effect-event': 0.0.2(@types/react@18.3.26)(react@18.3.1) + '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@18.3.26)(react@18.3.1) + react: 18.3.1 + optionalDependencies: + '@types/react': 18.3.26 + + '@radix-ui/react-use-effect-event@0.0.2(@types/react@18.3.26)(react@18.3.1)': + dependencies: + '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@18.3.26)(react@18.3.1) + react: 18.3.1 + optionalDependencies: + '@types/react': 18.3.26 + + '@radix-ui/react-use-escape-keydown@1.1.1(@types/react@18.3.26)(react@18.3.1)': + dependencies: + '@radix-ui/react-use-callback-ref': 1.1.1(@types/react@18.3.26)(react@18.3.1) + react: 18.3.1 + optionalDependencies: + '@types/react': 18.3.26 + + '@radix-ui/react-use-layout-effect@1.1.1(@types/react@18.3.26)(react@18.3.1)': + dependencies: + react: 18.3.1 + optionalDependencies: + '@types/react': 18.3.26 + + '@radix-ui/react-use-previous@1.1.1(@types/react@18.3.26)(react@18.3.1)': + dependencies: + react: 18.3.1 + optionalDependencies: + '@types/react': 18.3.26 + + '@radix-ui/react-use-rect@1.1.1(@types/react@18.3.26)(react@18.3.1)': + dependencies: + '@radix-ui/rect': 1.1.1 + react: 18.3.1 + optionalDependencies: + '@types/react': 18.3.26 + + '@radix-ui/react-use-size@1.1.1(@types/react@18.3.26)(react@18.3.1)': + dependencies: + '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@18.3.26)(react@18.3.1) + react: 18.3.1 + optionalDependencies: + '@types/react': 18.3.26 + + '@radix-ui/react-visually-hidden@1.2.3(@types/react-dom@18.3.7(@types/react@18.3.26))(@types/react@18.3.26)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + dependencies: + '@radix-ui/react-primitive': 
2.1.3(@types/react-dom@18.3.7(@types/react@18.3.26))(@types/react@18.3.26)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + react: 18.3.1 + react-dom: 18.3.1(react@18.3.1) + optionalDependencies: + '@types/react': 18.3.26 + '@types/react-dom': 18.3.7(@types/react@18.3.26) + + '@radix-ui/rect@1.1.1': {} + '@react-aria/focus@3.21.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': dependencies: '@react-aria/interactions': 3.25.6(react-dom@18.3.1(react@18.3.1))(react@18.3.1) @@ -4296,6 +4896,10 @@ snapshots: argparse@2.0.1: {} + aria-hidden@1.2.6: + dependencies: + tslib: 2.8.1 + array-iterate@2.0.1: {} astring@1.9.0: {} @@ -4721,6 +5325,8 @@ snapshots: dequal@2.0.3: {} + detect-node-es@1.1.0: {} + devlop@1.1.0: dependencies: dequal: 2.0.3 @@ -4964,6 +5570,8 @@ snapshots: hasown: 2.0.2 math-intrinsics: 1.1.0 + get-nonce@1.0.1: {} + get-proto@1.0.1: dependencies: dunder-proto: 1.0.1 @@ -6320,6 +6928,25 @@ snapshots: '@types/react': 18.3.26 redux: 5.0.1 + react-remove-scroll-bar@2.3.8(@types/react@18.3.26)(react@18.3.1): + dependencies: + react: 18.3.1 + react-style-singleton: 2.2.3(@types/react@18.3.26)(react@18.3.1) + tslib: 2.8.1 + optionalDependencies: + '@types/react': 18.3.26 + + react-remove-scroll@2.7.2(@types/react@18.3.26)(react@18.3.1): + dependencies: + react: 18.3.1 + react-remove-scroll-bar: 2.3.8(@types/react@18.3.26)(react@18.3.1) + react-style-singleton: 2.2.3(@types/react@18.3.26)(react@18.3.1) + tslib: 2.8.1 + use-callback-ref: 1.3.3(@types/react@18.3.26)(react@18.3.1) + use-sidecar: 1.1.3(@types/react@18.3.26)(react@18.3.1) + optionalDependencies: + '@types/react': 18.3.26 + react-smooth@4.0.4(react-dom@18.3.1(react@18.3.1))(react@18.3.1): dependencies: fast-equals: 5.3.2 @@ -6328,6 +6955,14 @@ snapshots: react-dom: 18.3.1(react@18.3.1) react-transition-group: 4.4.5(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + react-style-singleton@2.2.3(@types/react@18.3.26)(react@18.3.1): + dependencies: + get-nonce: 1.0.1 + react: 18.3.1 + tslib: 2.8.1 + 
optionalDependencies: + '@types/react': 18.3.26 + react-syntax-highlighter@15.6.6(react@18.3.1): dependencies: '@babel/runtime': 7.28.4 @@ -7045,6 +7680,21 @@ snapshots: querystringify: 2.2.0 requires-port: 1.0.0 + use-callback-ref@1.3.3(@types/react@18.3.26)(react@18.3.1): + dependencies: + react: 18.3.1 + tslib: 2.8.1 + optionalDependencies: + '@types/react': 18.3.26 + + use-sidecar@1.1.3(@types/react@18.3.26)(react@18.3.1): + dependencies: + detect-node-es: 1.1.0 + react: 18.3.1 + tslib: 2.8.1 + optionalDependencies: + '@types/react': 18.3.26 + use-sync-external-store@1.6.0(react@18.3.1): dependencies: react: 18.3.1 diff --git a/frontend/docs/scripts/generate-llms.ts b/frontend/docs/scripts/generate-llms.ts index 367851a0bc..461eb175e6 100644 --- a/frontend/docs/scripts/generate-llms.ts +++ b/frontend/docs/scripts/generate-llms.ts @@ -98,8 +98,15 @@ function parseMetaJs(filepath: string): Record { content = content.replace(pattern, '$1"$2":'); // Apply twice to catch keys that were adjacent content = content.replace(pattern, '$1"$2":'); + // Quote unquoted keys inside inline objects (e.g. 
{ collapsed: true }) + content = content.replace( + /(\{\s*)([a-zA-Z_$][a-zA-Z0-9_$-]*)\s*:/g, + '$1"$2":', + ); // Remove trailing commas before closing braces content = content.replace(/,(\s*\n?\s*})(\s*);?/g, "$1"); + // Strip trailing semicolon from export default {...}; + content = content.replace(/\s*;\s*$/, ""); try { return JSON.parse(content); @@ -134,6 +141,63 @@ function extractTitle(value: any): string { return ""; } +function collectPagesFromDir( + dir: string, + urlPrefix: string, + sectionTitle: string, + pages: DocPage[], +): void { + const metaPath = path.join(dir, "_meta.js"); + if (!fs.existsSync(metaPath)) return; + + const meta = parseMetaJs(metaPath); + + for (const [key, value] of Object.entries(meta)) { + if (!isDocPage(key, value)) continue; + + const title = extractTitle(value as any); + const subDir = path.join(dir, key); + const href = `${DOCS_BASE_URL}/${urlPrefix}/${key}`; + + // Check if this key is a folder with its own _meta.js (sub-section) + const subMetaPath = path.join(subDir, "_meta.js"); + if (fs.existsSync(subMetaPath)) { + // Add the index page for this folder if it exists and isn't hidden + const indexMdx = path.join(subDir, "index.mdx"); + if (fs.existsSync(indexMdx)) { + const indexValue = parseMetaJs(subMetaPath)["index"]; + if (!indexValue || (typeof indexValue === "object" && indexValue.display !== "hidden")) { + pages.push({ + title: title || key, + slug: key, + href, + filepath: indexMdx, + section: sectionTitle, + }); + } + } + // Recurse into sub-section + collectPagesFromDir(subDir, `${urlPrefix}/${key}`, sectionTitle, pages); + continue; + } + + // Plain .mdx file + let mdxPath = path.join(dir, key + ".mdx"); + if (!fs.existsSync(mdxPath)) { + mdxPath = path.join(subDir, "index.mdx"); + } + if (!fs.existsSync(mdxPath)) continue; + + pages.push({ + title: title || key, + slug: key, + href, + filepath: mdxPath, + section: sectionTitle, + }); + } +} + function collectPages(): DocPage[] { const pages: DocPage[] = []; 
@@ -152,10 +216,11 @@ function collectPages(): DocPage[] { const sectionValue = rootMeta[sectionKey] ?? {}; const sectionTitle = typeof sectionValue === "object" - ? extractTitle(sectionValue) + ? extractTitle(sectionValue as any) : sectionKey; if (!fs.existsSync(sectionMetaPath)) { + // Plain top-level .mdx file const mdxPath = path.join(PAGES_DIR, sectionKey + ".mdx"); if (fs.existsSync(mdxPath)) { pages.push({ @@ -169,28 +234,8 @@ function collectPages(): DocPage[] { continue; } - const sectionMeta = parseMetaJs(sectionMetaPath); - for (const [pageKey, pageValue] of Object.entries(sectionMeta)) { - if (!isDocPage(pageKey, pageValue)) continue; - - const title = extractTitle(pageValue); - let mdxPath = path.join(sectionDir, pageKey + ".mdx"); - - if (!fs.existsSync(mdxPath)) { - mdxPath = path.join(sectionDir, pageKey, "index.mdx"); - } - if (!fs.existsSync(mdxPath)) continue; - - const href = `${DOCS_BASE_URL}/${sectionKey}/${pageKey}`; - - pages.push({ - title, - slug: pageKey, - href, - filepath: mdxPath, - section: sectionTitle || sectionKey, - }); - } + // Recurse into section directory + collectPagesFromDir(sectionDir, sectionKey, sectionTitle, pages); } return pages; @@ -366,7 +411,7 @@ function expandUniversalTabs( languages: string[] | null, ): string { const pattern = - /((?:(?!/g; + /((?:(?!/g; function processTabsBlock( _match: string, @@ -737,6 +782,7 @@ function buildSearchIndex( const miniSearch = new MiniSearch(MINISEARCH_OPTIONS); const docs: SearchDoc[] = []; + const seenIds = new Set(); for (const page of pages) { const raw = fs.readFileSync(page.filepath, "utf-8"); const md = convertMdxToMarkdown(raw, snippetTree, languages, page.filepath); @@ -748,9 +794,17 @@ function buildSearchIndex( for (const section of sections) { if (!section.content.trim()) continue; - const id = section.slug + let id = section.slug ? 
`${pageRoute}#${section.slug}` : pageRoute; + + if (seenIds.has(id)) { + let suffix = 2; + while (seenIds.has(`${id}-${suffix}`)) suffix++; + id = `${id}-${suffix}`; + } + seenIds.add(id); + const title = section.heading || page.title; docs.push({ diff --git a/frontend/docs/scripts/test-search-quality.ts b/frontend/docs/scripts/test-search-quality.ts index dfd89921a9..99b3773960 100644 --- a/frontend/docs/scripts/test-search-quality.ts +++ b/frontend/docs/scripts/test-search-quality.ts @@ -51,58 +51,60 @@ const TEST_CASES: SearchTestCase[] = [ { name: "hatchet.task( — defining a task", query: "hatchet.task(", - expectAnyOf: ["home/your-first-task"], + expectAnyOf: ["v1/tasks"], }, { name: "hatchet.task — without parens", query: "hatchet.task", - expectAnyOf: ["home/your-first-task"], + expectAnyOf: ["v1/tasks"], }, { name: "@hatchet.task() — Python decorator", query: "@hatchet.task()", - expectAnyOf: ["home/your-first-task"], + expectAnyOf: ["v1/tasks"], }, { name: "hatchet.workflow — defining a workflow", query: "hatchet.workflow", - expectAnyOf: ["home/dags", "home/orchestration"], + expectAnyOf: ["v1/durable-workflows-overview", "v1/patterns/directed-acyclic-graphs", "v1/priority", "reference/python/runnables"], + topN: 10, }, // ------------------------------------------------------------------------- // Getting started & onboarding // ------------------------------------------------------------------------- { - name: "quickstart", - query: "quickstart", - expectAnyOf: ["home/hatchet-cloud-quickstart", "self-hosting/kubernetes-quickstart"], + name: "v1/quickstart", + query: "v1/quickstart", + expectAnyOf: ["v1/quickstart", "self-hosting/kubernetes-quickstart"], }, { name: "setup", query: "setup", - expectAnyOf: ["home/setup", "home/hatchet-cloud-quickstart"], + expectAnyOf: ["v1/quickstart", "reference/cli/index", "agent-instructions/setup-cli"], + topN: 10, }, { name: "getting started", query: "getting started", - expectAnyOf: ["home/hatchet-cloud-quickstart", 
"home/setup"], + expectAnyOf: ["v1/quickstart"], topN: 10, }, { name: "install", query: "install", - expectAnyOf: ["home/hatchet-cloud-quickstart", "home/setup", "cli/index"], + expectAnyOf: ["v1/quickstart", "reference/cli/index", "reference/cli"], topN: 10, }, { name: "architecture", query: "architecture", - expectAnyOf: ["home/architecture"], + expectAnyOf: ["v1/architecture-and-guarantees"], }, { name: "guarantees", query: "guarantees", - expectAnyOf: ["home/guarantees-and-tradeoffs"], + expectAnyOf: ["v1/architecture-and-guarantees"], }, // ------------------------------------------------------------------------- @@ -111,30 +113,31 @@ const TEST_CASES: SearchTestCase[] = [ { name: "define a task", query: "define a task", - expectAnyOf: ["home/your-first-task"], + expectAnyOf: ["v1/tasks"], topN: 10, }, { name: "create worker", query: "create worker", - expectAnyOf: ["home/workers"], + expectAnyOf: ["v1/workers"], topN: 10, }, { name: "worker", query: "worker", - expectAnyOf: ["home/workers"], + expectAnyOf: ["v1/workers", "v1/runtime/workers"], }, { name: "run task", query: "run task", - expectAnyOf: ["home/running-your-task", "home/running-tasks", "home/run-with-results"], + expectAnyOf: ["v1/running-your-task"], topN: 10, }, { - name: "environments", - query: "environments", - expectAnyOf: ["home/environments"], + name: "v1/environments", + query: "v1/environments", + expectAnyOf: ["v1/environments"], + topN: 10, }, // ------------------------------------------------------------------------- @@ -143,43 +146,43 @@ const TEST_CASES: SearchTestCase[] = [ { name: "run with results", query: "run with results", - expectAnyOf: ["home/run-with-results"], + expectAnyOf: ["v1/running-your-task"], }, { name: "run no wait", query: "run no wait", - expectAnyOf: ["home/run-no-wait"], + expectAnyOf: ["v1/running-your-task"], }, { name: "scheduled runs", query: "scheduled runs", - expectAnyOf: ["home/scheduled-runs"], + expectAnyOf: ["v1/scheduled-runs"], }, { name: "cron", 
query: "cron", - expectAnyOf: ["home/cron-runs"], + expectAnyOf: ["v1/cron-runs"], }, { name: "event trigger", query: "event trigger", - expectAnyOf: ["home/run-on-event"], + expectAnyOf: ["v1/external-events/run-on-event"], topN: 10, }, { name: "bulk run", query: "bulk run", - expectAnyOf: ["home/bulk-run"], + expectAnyOf: ["v1/bulk-run"], }, { name: "webhooks", query: "webhooks", - expectAnyOf: ["home/webhooks"], + expectAnyOf: ["v1/webhooks"], }, { name: "inter-service", query: "inter-service", - expectAnyOf: ["home/inter-service-triggering"], + expectAnyOf: ["v1/inter-service-triggering"], }, // ------------------------------------------------------------------------- @@ -188,22 +191,22 @@ const TEST_CASES: SearchTestCase[] = [ { name: "concurrency", query: "concurrency", - expectAnyOf: ["home/concurrency"], + expectAnyOf: ["v1/concurrency"], }, { name: "rate limit", query: "rate limit", - expectAnyOf: ["home/rate-limits"], + expectAnyOf: ["v1/rate-limits"], }, { name: "rate limits (plural)", query: "rate limits", - expectAnyOf: ["home/rate-limits"], + expectAnyOf: ["v1/rate-limits"], }, { name: "priority", query: "priority", - expectAnyOf: ["home/priority"], + expectAnyOf: ["v1/priority"], }, // ------------------------------------------------------------------------- @@ -212,32 +215,38 @@ const TEST_CASES: SearchTestCase[] = [ { name: "orchestration", query: "orchestration", - expectAnyOf: ["home/orchestration"], + expectAnyOf: ["v1/durable-workflows-overview", "v1/patterns/mixing-patterns"], + topN: 10, }, { name: "DAG", query: "DAG", - expectAnyOf: ["home/dags"], + expectAnyOf: ["v1/durable-workflows-overview", "v1/patterns/directed-acyclic-graphs"], + topN: 10, }, { name: "conditional workflows", query: "conditional workflows", - expectAnyOf: ["home/conditional-workflows"], + expectAnyOf: ["v1/durable-workflows-overview", "v1/conditions"], + topN: 10, }, { name: "on failure", query: "on failure", - expectAnyOf: ["home/on-failure-tasks"], + expectAnyOf: 
["v1/durable-workflows-overview", "v1/on-failure", "v1/retry-policies"], + topN: 10, }, { name: "child spawning", query: "child spawning", - expectAnyOf: ["home/child-spawning"], + expectAnyOf: ["v1/durable-workflows-overview", "v1/child-spawning"], + topN: 10, }, { name: "child tasks", query: "child tasks", - expectAnyOf: ["home/child-spawning"], + expectAnyOf: ["v1/durable-workflows-overview", "v1/child-spawning", "v1/advanced-assignment/sticky-assignment"], + topN: 10, }, // ------------------------------------------------------------------------- @@ -246,22 +255,22 @@ const TEST_CASES: SearchTestCase[] = [ { name: "durable execution", query: "durable execution", - expectAnyOf: ["home/durable-execution"], + expectAnyOf: ["v1/durable-workflows-overview", "v1/patterns/durable-task-execution"], }, { name: "durable events", query: "durable events", - expectAnyOf: ["home/durable-events"], + expectAnyOf: ["v1/durable-workflows-overview", "v1/events"], }, { name: "durable sleep", query: "durable sleep", - expectAnyOf: ["home/durable-sleep"], + expectAnyOf: ["v1/durable-workflows-overview", "v1/sleep"], }, { name: "durable best practices", query: "durable best practices", - expectAnyOf: ["home/durable-best-practices"], + expectAnyOf: ["v1/durable-workflows-overview", "v1/patterns/mixing-patterns"], topN: 10, }, @@ -271,22 +280,22 @@ const TEST_CASES: SearchTestCase[] = [ { name: "retry", query: "retry", - expectAnyOf: ["home/retry-policies"], + expectAnyOf: ["v1/retry-policies"], }, { name: "timeout", query: "timeout", - expectAnyOf: ["home/timeouts"], + expectAnyOf: ["v1/timeouts"], }, { name: "cancellation", query: "cancellation", - expectAnyOf: ["home/cancellation"], + expectAnyOf: ["v1/cancellation"], }, { name: "bulk retries", query: "bulk retries", - expectAnyOf: ["home/bulk-retries-and-cancellations"], + expectAnyOf: ["v1/bulk-retries-and-cancellations"], }, // ------------------------------------------------------------------------- @@ -295,33 +304,33 @@ const 
TEST_CASES: SearchTestCase[] = [ { name: "sticky assignment", query: "sticky assignment", - expectAnyOf: ["home/sticky-assignment"], + expectAnyOf: ["v1/advanced-assignment/sticky-assignment"], }, { name: "worker affinity", query: "worker affinity", - expectAnyOf: ["home/worker-affinity"], + expectAnyOf: ["v1/advanced-assignment/worker-affinity"], }, { name: "manual slot release", query: "manual slot release", - expectAnyOf: ["home/manual-slot-release"], + expectAnyOf: ["v1/advanced-assignment/manual-slot-release"], }, { name: "autoscaling workers", query: "autoscaling workers", - expectAnyOf: ["home/autoscaling-workers"], + expectAnyOf: ["v1/autoscaling-workers", "v1/runtime/autoscaling-workers"], }, { name: "worker health check", query: "worker health check", - expectAnyOf: ["home/worker-healthchecks"], + expectAnyOf: ["v1/worker-healthchecks"], topN: 10, }, { - name: "troubleshooting", - query: "troubleshooting", - expectAnyOf: ["home/troubleshooting-workers"], + name: "v1/troubleshooting", + query: "v1/troubleshooting", + expectAnyOf: ["v1/troubleshooting", "v1/troubleshooting/index"], }, // ------------------------------------------------------------------------- @@ -330,27 +339,28 @@ const TEST_CASES: SearchTestCase[] = [ { name: "logging", query: "logging", - expectAnyOf: ["home/logging"], + expectAnyOf: ["v1/logging"], }, { name: "opentelemetry", query: "opentelemetry", - expectAnyOf: ["home/opentelemetry"], + expectAnyOf: ["v1/opentelemetry"], }, { name: "prometheus metrics", query: "prometheus metrics", - expectAnyOf: ["self-hosting/prometheus-metrics", "home/prometheus-metrics"], + expectAnyOf: ["self-hosting/prometheus-metrics", "v1/prometheus-metrics"], }, { name: "streaming", query: "streaming", - expectAnyOf: ["home/streaming"], + expectAnyOf: ["v1/streaming"], }, { name: "additional metadata", query: "additional metadata", - expectAnyOf: ["home/additional-metadata"], + expectAnyOf: ["v1/additional-metadata", "v1/bulk-retries-and-cancellations"], + 
topN: 10, }, // ------------------------------------------------------------------------- @@ -359,27 +369,32 @@ const TEST_CASES: SearchTestCase[] = [ { name: "pydantic", query: "pydantic", - expectAnyOf: ["home/pydantic"], + expectAnyOf: ["reference/python/pydantic"], + skip: true, }, { name: "asyncio", query: "asyncio", - expectAnyOf: ["home/asyncio"], + expectAnyOf: ["reference/python/asyncio"], + skip: true, }, { name: "dependency injection", query: "dependency injection", - expectAnyOf: ["home/middleware"], + expectAnyOf: ["reference/python/dependency-injection"], + skip: true, }, { name: "dataclass", query: "dataclass", - expectAnyOf: ["home/dataclasses"], + expectAnyOf: ["reference/python/dataclasses"], + skip: true, }, { name: "lifespans", query: "lifespans", - expectAnyOf: ["home/lifespans"], + expectAnyOf: ["reference/python/lifespans"], + skip: true, }, // ------------------------------------------------------------------------- @@ -388,27 +403,32 @@ const TEST_CASES: SearchTestCase[] = [ { name: "migration python", query: "migration python", - expectAnyOf: ["home/migration-guide-python"], + expectAnyOf: ["v1/migrating/migration-guide-python"], + skip: true, }, { name: "migration typescript", query: "migration typescript", - expectAnyOf: ["home/migration-guide-typescript"], + expectAnyOf: ["v1/migrating/migration-guide-typescript"], + skip: true, }, { name: "migration go", query: "migration go", - expectAnyOf: ["home/migration-guide-go"], + expectAnyOf: ["v1/migrating/migration-guide-go"], + skip: true, }, { name: "engine migration", query: "engine migration", - expectAnyOf: ["home/migration-guide-engine"], + expectAnyOf: ["v1/migrating/migration-guide-engine"], + skip: true, }, { name: "SDK improvements", query: "SDK improvements", - expectAnyOf: ["home/v1-sdk-improvements"], + expectAnyOf: ["v1/migrating/v1-sdk-improvements"], + skip: true, }, // ------------------------------------------------------------------------- @@ -417,12 +437,12 @@ const 
TEST_CASES: SearchTestCase[] = [ { name: "docker compose", query: "docker compose", - expectAnyOf: ["self-hosting/docker-compose", "home/docker"], + expectAnyOf: ["self-hosting/docker-compose", "v1/docker", "v1/runtime/docker"], }, { name: "running with docker", query: "running with docker", - expectAnyOf: ["home/docker", "self-hosting/docker-compose"], + expectAnyOf: ["v1/docker", "v1/runtime/docker", "self-hosting/docker-compose"], topN: 10, }, { @@ -519,22 +539,25 @@ const TEST_CASES: SearchTestCase[] = [ { name: "CLI", query: "CLI", - expectAnyOf: ["cli/index"], + expectAnyOf: ["reference/cli", "agent-instructions/setup-cli"], }, { name: "TUI", query: "TUI", - expectAnyOf: ["cli/tui"], + expectAnyOf: ["reference/cli", "reference/cli/tui"], + topN: 10, }, { name: "profiles", query: "profiles", - expectAnyOf: ["cli/profiles"], + expectAnyOf: ["reference/cli", "reference/cli/profiles"], + topN: 10, }, { name: "running hatchet locally", query: "running hatchet locally", - expectAnyOf: ["cli/running-hatchet-locally"], + expectAnyOf: ["reference/cli", "self-hosting/hatchet-lite", "v1/quickstart"], + topN: 10, }, // ------------------------------------------------------------------------- @@ -543,37 +566,39 @@ const TEST_CASES: SearchTestCase[] = [ { name: "SimpleInput — Pydantic model", query: "SimpleInput", - expectAnyOf: ["home/your-first-task"], + expectAnyOf: ["v1/tasks"], }, { name: "input_validator — Python arg", query: "input_validator", - expectAnyOf: ["home/pydantic", "home/your-first-task"], + expectAnyOf: ["reference/python/pydantic", "v1/tasks"], }, { name: "BaseModel — Pydantic", query: "BaseModel", - expectAnyOf: ["home/pydantic", "home/your-first-task"], + expectAnyOf: ["reference/python/pydantic", "v1/tasks"], }, { name: "ctx.spawn — child spawn", query: "ctx.spawn", - expectAnyOf: ["home/child-spawning"], + expectAnyOf: ["v1/durable-workflows-overview", "v1/child-spawning"], + topN: 10, }, { name: "NewStandaloneTask — Go API", query: 
"NewStandaloneTask", - expectAnyOf: ["home/your-first-task", "home/migration-guide-go"], + expectAnyOf: ["v1/tasks", "v1/migrating/migration-guide-go", "v1/external-events/run-on-event"], }, { name: "DurableContext", query: "DurableContext", - expectAnyOf: ["home/durable-execution"], + expectAnyOf: ["v1/durable-workflows-overview", "v1/patterns/durable-task-execution"], + skip: true, }, { name: "aio_run — Python async run", query: "aio_run", - expectAnyOf: ["home/your-first-task", "home/run-with-results"], + expectAnyOf: ["v1/tasks", "v1/running-your-task", "v1/bulk-run"], }, // ------------------------------------------------------------------------- @@ -582,19 +607,19 @@ const TEST_CASES: SearchTestCase[] = [ { name: "hatchet.task( — trailing paren", query: "hatchet.task(", - expectAnyOf: ["home/your-first-task"], + expectAnyOf: ["v1/tasks"], topN: 10, }, { name: "ctx.spawn( — trailing paren", query: "ctx.spawn(", - expectAnyOf: ["home/child-spawning"], + expectAnyOf: ["v1/durable-workflows-overview", "v1/child-spawning"], topN: 10, }, { name: ".run() — dot prefix and parens", query: ".run()", - expectAnyOf: ["home/your-first-task", "home/run-with-results", "home/running-your-task"], + expectAnyOf: ["v1/tasks", "v1/running-your-task"], topN: 10, }, { @@ -614,122 +639,131 @@ const TEST_CASES: SearchTestCase[] = [ { name: "delay → scheduled/sleep", query: "delay", - expectAnyOf: ["home/durable-sleep", "home/scheduled-runs"], + expectAnyOf: ["v1/durable-workflows-overview", "v1/sleep", "v1/scheduled-runs"], }, { name: "debounce → concurrency", query: "debounce", - expectAnyOf: ["home/concurrency"], + expectAnyOf: ["v1/concurrency"], }, { name: "dedup → concurrency", query: "dedup", - expectAnyOf: ["home/concurrency"], + expectAnyOf: ["v1/concurrency"], }, { name: "throttle → rate limits", query: "throttle", - expectAnyOf: ["home/rate-limits", "home/concurrency"], + expectAnyOf: ["v1/rate-limits", "v1/concurrency"], }, { name: "fan out → child spawning", query: "fan 
out", - expectAnyOf: ["home/child-spawning", "home/bulk-run"], + expectAnyOf: ["v1/durable-workflows-overview", "v1/bulk-run", "v1/child-spawning"], }, { name: "parallel tasks", query: "parallel tasks", - expectAnyOf: ["home/child-spawning", "home/run-with-results"], + expectAnyOf: ["v1/durable-workflows-overview", "v1/child-spawning"], + topN: 10, }, { name: "background job", query: "background job", - expectAnyOf: ["home/your-first-task", "home/run-no-wait", "home/workers"], + expectAnyOf: ["v1/tasks", "v1/running-your-task", "v1/workers"], }, { name: "recurring → cron", query: "recurring", - expectAnyOf: ["home/cron-runs"], + expectAnyOf: ["v1/cron-runs"], }, { name: "error handling → retry/failure", query: "error handling", - expectAnyOf: ["home/retry-policies", "home/on-failure-tasks"], + expectAnyOf: ["v1/retry-policies", "v1/durable-workflows-overview", "v1/on-failure"], + topN: 10, }, { name: "fire and forget → run no wait", query: "fire and forget", - expectAnyOf: ["home/run-no-wait"], + expectAnyOf: ["v1/running-your-task"], topN: 10, }, { name: "scale workers → autoscaling", query: "scale workers", - expectAnyOf: ["home/autoscaling-workers"], + expectAnyOf: ["v1/autoscaling-workers", "v1/runtime/autoscaling-workers"], }, { name: "pipeline → DAG", query: "pipeline", - expectAnyOf: ["home/dags", "home/orchestration"], + expectAnyOf: [ + "v1/durable-workflows-overview", + "v1/patterns/directed-acyclic-graphs", + "cookbooks/rag-and-indexing", + "cookbooks/document-processing", + ], + topN: 10, }, { name: "long running task → durable", query: "long running task", - expectAnyOf: ["home/durable-execution"], + expectAnyOf: ["v1/durable-workflows-overview", "v1/patterns/durable-task-execution", "v1/sleep"], topN: 10, }, { name: "batch → bulk run", query: "batch tasks", - expectAnyOf: ["home/bulk-run"], + expectAnyOf: ["v1/bulk-run", "cookbooks/batch-processing"], topN: 10, }, { name: "if else → conditional", query: "if else workflow", - expectAnyOf: 
["home/conditional-workflows"], + expectAnyOf: ["v1/durable-workflows-overview", "v1/conditions"], topN: 10, }, { name: "monitor → observability", query: "monitor", - expectAnyOf: ["home/opentelemetry", "home/prometheus-metrics", "home/logging"], + expectAnyOf: ["v1/opentelemetry", "v1/prometheus-metrics", "v1/logging", "self-hosting/prometheus-metrics"], topN: 10, }, { name: "tracing → opentelemetry", query: "tracing", - expectAnyOf: ["home/opentelemetry"], + expectAnyOf: ["v1/opentelemetry"], topN: 10, }, { - name: "observability", - query: "observability", - expectAnyOf: ["home/opentelemetry", "home/prometheus-metrics", "home/logging"], + name: "v1/observability", + query: "v1/observability", + expectAnyOf: ["v1/opentelemetry", "v1/prometheus-metrics", "v1/logging", "v1/streaming"], topN: 10, + skip: true, }, { name: "debug → troubleshooting", query: "debug", - expectAnyOf: ["home/troubleshooting-workers", "home/logging"], + expectAnyOf: ["v1/troubleshooting", "v1/troubleshooting/index", "v1/logging", "agent-instructions/debug-run"], topN: 10, }, { name: "deploy → docker/k8s", query: "deploy", - expectAnyOf: ["home/docker", "self-hosting/docker-compose", "self-hosting/kubernetes-quickstart"], + expectAnyOf: ["v1/docker", "v1/runtime/docker", "self-hosting/docker-compose", "self-hosting/kubernetes-quickstart"], topN: 10, }, { name: "upgrade → migration", query: "upgrade", - expectAnyOf: ["home/migration-guide-python", "home/migration-guide-typescript", "home/migration-guide-go", "home/migration-guide-engine"], + expectAnyOf: ["v1/migrating/migration-guide-python", "v1/migrating/migration-guide-typescript", "v1/migrating/migration-guide-go", "v1/migrating/migration-guide-engine", "self-hosting/upgrading-downgrading"], topN: 10, }, { name: "downgrade → downgrading", query: "downgrade", - expectAnyOf: ["self-hosting/downgrading-db-schema-manually"], + expectAnyOf: ["self-hosting/downgrading-db-schema-manually", "self-hosting/upgrading-downgrading"], topN: 10, }, { 
@@ -747,32 +781,34 @@ const TEST_CASES: SearchTestCase[] = [ { name: "async await → asyncio", query: "async await", - expectAnyOf: ["home/asyncio"], + expectAnyOf: ["reference/python/asyncio"], topN: 10, + skip: true, }, { name: "liveness → health checks", query: "liveness", - expectAnyOf: ["home/worker-healthchecks"], + expectAnyOf: ["v1/worker-healthchecks"], topN: 10, }, { name: "wait for event → durable events", query: "wait for event", - expectAnyOf: ["home/durable-events"], + expectAnyOf: ["v1/durable-workflows-overview", "v1/events", "v1/external-events/pushing-events", "v1/external-events/event-filters", "v1/sleep"], topN: 10, }, { name: "api call → inter-service", query: "api call between services", - expectAnyOf: ["home/inter-service-triggering"], + expectAnyOf: ["v1/inter-service-triggering"], topN: 10, }, { name: "cleanup → lifespans", query: "cleanup shutdown", - expectAnyOf: ["home/lifespans"], + expectAnyOf: ["reference/python/lifespans"], topN: 10, + skip: true, }, // ------------------------------------------------------------------------- @@ -781,37 +817,37 @@ const TEST_CASES: SearchTestCase[] = [ { name: "how to retry a failed task", query: "how to retry a failed task", - expectAnyOf: ["home/retry-policies", "home/on-failure-tasks"], + expectAnyOf: ["v1/retry-policies", "v1/durable-workflows-overview"], topN: 10, }, { name: "how to run tasks in parallel", query: "how to run tasks in parallel", - expectAnyOf: ["home/child-spawning", "home/run-with-results"], + expectAnyOf: ["v1/durable-workflows-overview", "v1/child-spawning", "v1/running-your-task"], topN: 10, }, { name: "how to cancel a running task", query: "how to cancel a running task", - expectAnyOf: ["home/cancellation"], + expectAnyOf: ["v1/cancellation"], topN: 10, }, { name: "how to set up cron job", query: "how to set up cron job", - expectAnyOf: ["home/cron-runs"], + expectAnyOf: ["v1/cron-runs"], topN: 10, }, { name: "how to handle errors", query: "how to handle errors", - 
expectAnyOf: ["home/retry-policies", "home/on-failure-tasks"], + expectAnyOf: ["v1/retry-policies", "v1/durable-workflows-overview", "v1/on-failure"], topN: 10, }, { name: "how to limit concurrency", query: "how to limit concurrency", - expectAnyOf: ["home/concurrency", "home/rate-limits"], + expectAnyOf: ["v1/concurrency", "v1/rate-limits"], topN: 10, }, ]; diff --git a/frontend/docs/styles/global.css b/frontend/docs/styles/global.css index 26369c23a8..95fb997859 100644 --- a/frontend/docs/styles/global.css +++ b/frontend/docs/styles/global.css @@ -175,6 +175,97 @@ } } +/* ========================= */ +/* Nextra Cards Overrides */ +/* ========================= */ +.nextra-cards { + display: grid; + gap: 0.875rem; + margin-top: 1.5rem; + margin-bottom: 1.5rem; + grid-template-columns: repeat(var(--rows, 3), minmax(0, 1fr)); + align-items: stretch; +} + +@media (max-width: 768px) { + .nextra-cards { + grid-template-columns: 1fr; + } +} + +/* Card: minimal doc-style link block. Title on top (column-reverse), then description. */ +.nextra-card { + display: flex; + flex-direction: column-reverse; + align-items: stretch; + justify-content: flex-start; + min-height: 100%; + gap: 0.375rem; + padding: 1rem 1.25rem; + border-radius: var(--radius, 0.5rem); + border: 1px solid hsl(var(--border)); + background: hsl(var(--card)); + font-size: 0.875rem; + line-height: 1.5; + color: hsl(var(--muted-foreground)); + text-decoration: none; + transition: + border-color 0.15s ease, + background-color 0.15s ease, + color 0.15s ease; +} + +/* Description/content area grows to fill space for equal-height cards */ +.nextra-card > *:not(span) { + flex: 1 1 auto; + min-height: 0; + overflow-wrap: break-word; +} + +.nextra-card:hover { + border-color: hsl(var(--ring)); + background: hsl(var(--muted)); + color: hsl(var(--muted-foreground)); +} + +/* Title row: icon + label. Override Nextra’s padding so card padding controls spacing. 
*/ +.nextra-card > span { + display: flex; + align-items: center; + gap: 0.5rem; + padding: 0; + margin: 0; + font-size: 0.9375rem; + font-weight: 600; + line-height: 1.3; + color: hsl(var(--foreground)); +} + +.nextra-card:hover > span { + color: hsl(var(--foreground)); +} + +/* Optional arrow after title on hover */ +.nextra-card > span::after { + content: "→"; + margin-left: 0.25rem; + opacity: 0; + transition: opacity 0.15s ease, transform 0.15s ease; +} + +.nextra-card:hover > span::after { + opacity: 0.7; + transform: translateX(2px); +} + +/* Allow natural wrapping instead of truncation for better readability */ +.nextra-card ._truncate { + overflow: visible; + text-overflow: clip; + white-space: normal; + word-wrap: break-word; +} + /* ========================= */ /* Dark Mode Fixes (Nextra) */ /* ========================= */ @@ -256,7 +347,7 @@ h3 { background-color: var(--background) !important; } -nav { +nav:not(.nextra-toc) { background-color: var(--background) !important; } @@ -265,22 +356,13 @@ nav { box-shadow: none !important; } -/* MCP / markdown action links in breadcrumb row */ +/* MCP / markdown action links - always under breadcrumbs, left justified */ .page-actions { - position: absolute; - top: -1.75rem; - right: 0; display: flex; gap: 0.75rem; align-items: center; white-space: nowrap; -} - -/* Drop to its own row on narrower viewports */ -@media (max-width: 1100px) { - .page-actions { - position: static; - margin-top: 0.5rem; - margin-bottom: 0.25rem; - } + margin-top: 0.5rem; + margin-bottom: 0.5rem; + justify-content: flex-start; } diff --git a/frontend/docs/theme.config.tsx b/frontend/docs/theme.config.tsx index cb89069176..28cf995eee 100644 --- a/frontend/docs/theme.config.tsx +++ b/frontend/docs/theme.config.tsx @@ -3,6 +3,7 @@ import { useConfig, useTheme } from "nextra-theme-docs"; import { useRouter } from "next/router"; import posthog from "posthog-js"; import Search from "@/components/Search"; +import { LanguageSelectorButton } 
from "@/components/LanguageSelectorButton"; const DEFAULT_ORIGIN = "https://docs.hatchet.run"; @@ -38,6 +39,13 @@ const MarkdownIcon = () => ( ); +const CopyIcon = () => ( + + + + +); + function CopyClaudeButton({ command }: { command: string }) { const [copied, setCopied] = useState(false); @@ -61,6 +69,43 @@ function CopyClaudeButton({ command }: { command: string }) { ); } +function CopyForLLMButton({ markdownHref, pathname }: { markdownHref: string; pathname: string }) { + const [copied, setCopied] = useState(false); + const [loading, setLoading] = useState(false); + + const handleClick = useCallback( + async (e: React.MouseEvent) => { + e.preventDefault(); + if (loading || copied) return; + setLoading(true); + try { + const base = typeof window !== "undefined" ? window.location.origin : DEFAULT_ORIGIN; + const res = await fetch(`${base}${markdownHref}`); + if (!res.ok) throw new Error("Failed to fetch"); + const text = await res.text(); + await navigator.clipboard.writeText(text); + setCopied(true); + posthog.capture("docs_copy_for_llm", { page: pathname }); + setTimeout(() => setCopied(false), 2000); + } catch { + // no-op + } finally { + setLoading(false); + } + }, + [markdownHref, pathname] + ); + + return ( +
+ + + {loading ? "..." : copied ? "Copied!" : "Copy for LLM"} + + + ); +} + const pageLinkStyle: React.CSSProperties = { fontSize: "0.75rem", opacity: 0.5, @@ -93,9 +138,10 @@ const config = { const fallbackTitle = "Hatchet Documentation"; - // Build the path to the LLM-friendly markdown version of this page + // Build the path to the LLM-friendly markdown version of this page (include basePath so static file resolves) const pathname = router.pathname.replace(/^\//, "").replace(/\/$/, "") || "index"; - const llmsMarkdownHref = `/llms/${pathname}.md`; + const base = router.basePath ? router.basePath.replace(/\/$/, "") : ""; + const llmsMarkdownHref = `${base}/llms/${pathname}.md`; return ( <> @@ -103,7 +149,7 @@ const config = { - + ); }, @@ -124,7 +170,8 @@ const config = { const pathname = router.pathname.replace(/^\//, "").replace(/\/$/, "") || "index"; - const llmsMarkdownHref = `/llms/${pathname}.md`; + const base = router.basePath ? router.basePath.replace(/\/$/, "") : ""; + const llmsMarkdownHref = `${base}/llms/${pathname}.md`; const mcpUrl = `${origin}/api/mcp`; const cursorConfig = JSON.stringify({ @@ -136,20 +183,22 @@ const config = { const claudeCommand = `claude mcp add --transport http hatchet-docs ${mcpUrl}`; return ( - + ); }, primaryHue: { @@ -175,9 +224,13 @@ const config = { `https://github.com/hatchet-dev/hatchet/issues/new`, }, footer: false, + toc: { + backToTop: true, + }, sidebar: { - defaultMenuCollapseLevel: 2, - toggleButton: true, + defaultMenuCollapseLevel: 1, + toggleButton: false, + autoCollapse: true, }, search: { component: Search, diff --git a/frontend/snippets/generate.py b/frontend/snippets/generate.py index ab89e50d0f..139700c700 100644 --- a/frontend/snippets/generate.py +++ b/frontend/snippets/generate.py @@ -4,7 +4,7 @@ import re from dataclasses import asdict, dataclass from enum import Enum -from typing import Any, cast +from typing import Any, Callable, cast ROOT = "../../" BASE_SNIPPETS_DIR = os.path.join(ROOT, 
"frontend", "docs", "lib") @@ -26,6 +26,8 @@ r"README\.md$", ] +GUIDES_BASE = "sdks/guides" + @dataclass class ParsingContext: @@ -186,6 +188,78 @@ def process_examples() -> list[ProcessedExample]: return examples +GUIDES_LANG_TO_CTX: dict[str, SDKParsingContext] = { + "python": SDKParsingContext.PYTHON, + "typescript": SDKParsingContext.TYPESCRIPT, + "go": SDKParsingContext.GO, + "ruby": SDKParsingContext.RUBY, +} + + +def process_guides() -> list[ProcessedExample]: + """Process guide examples from sdks/guides/{lang}/ into examples/{lang}/guides/.""" + examples: list[ProcessedExample] = [] + + for lang_dir, ctx in GUIDES_LANG_TO_CTX.items(): + guides_base = os.path.join(ROOT, GUIDES_BASE, lang_dir) + if not os.path.isdir(guides_base): + continue + + pattern = guides_base + "/**/*" + ctx.value.extension + + for filename in glob.iglob(pattern, recursive=True): + if any(re.search(p, filename) for p in IGNORED_FILE_PATTERNS): + continue + + with open(filename) as f: + content = f.read() + + rel_path = filename.replace(guides_base, "") + output_path = f"examples/{ctx.name.lower()}/guides{rel_path}" + code_path = output_path + + github_url = f"https://github.com/{OUTPUT_GITHUB_ORG}/{OUTPUT_GITHUB_REPO}/tree/main/{code_path}" + + comment_prefix = re.escape(ctx.value.comment_prefix) + snippet_pattern = rf"{comment_prefix} >\s+(.+?)\n(.*?){comment_prefix} !!" 
+ matches = list(re.finditer(snippet_pattern, content, re.DOTALL)) + + if not matches: + snippets = [ + Snippet( + title="all", + content=content, + githubUrl=github_url, + language=ctx.name.lower(), + codePath=code_path, + ) + ] + else: + snippets = [ + Snippet( + title=x[0], + content=x[1], + githubUrl=github_url, + language=ctx.name.lower(), + codePath=code_path, + ) + for match in matches + if (x := parse_snippet_from_block(match)) + ] + + examples.append( + ProcessedExample( + context=ctx, + filepath=filename, + output_path=output_path, + snippets=snippets, + raw_content=content, + ) + ) + + return examples + + def create_snippet_tree(examples: list[ProcessedExample]) -> dict[str, dict[str, Any]]: tree: dict[str, Any] = {} @@ -232,6 +306,68 @@ def clean_example_content(content: str, comment_prefix: str) -> str: ) +GUIDES_SOURCE = "sdks/guides" +GUIDES_OUTPUT = "examples" + + +def _read_sdk_version(lang: str) -> str: + """Read the published SDK version from the source package file.""" + if lang == "python": + path = os.path.join(ROOT, "sdks", "python", "pyproject.toml") + for line in open(path): + if line.startswith("version = "): + return line.split('"')[1].strip() + elif lang == "typescript": + data = json.load(open(os.path.join(ROOT, "sdks", "typescript", "package.json"))) + return data["version"] + elif lang == "ruby": + path = os.path.join(ROOT, "sdks", "ruby", "src", "lib", "hatchet", "version.rb") + for line in open(path): + if "VERSION" in line: + return line.split('"')[1].strip() + elif lang == "go": + # Go module uses monorepo; use Python SDK version as proxy for hatchet release + return _read_sdk_version("python") + return "0.0.0" + + +def copy_guide_dep_file( + lang: str, + filename: str, + use_published: bool = True, +) -> None: + """Copy a dep file from sdks/guides/{lang}/ to examples/{lang}/guides/. 
+ If use_published, replace local path refs with published package versions.""" + src = os.path.join(ROOT, GUIDES_SOURCE, lang, filename) + out_dir = os.path.join(ROOT, GUIDES_OUTPUT, lang, "guides") + if not os.path.isfile(src) or not os.path.isdir(out_dir): + return + content = open(src).read() + + if use_published: + ver = _read_sdk_version(lang) + if lang == "go": + content = content.replace("module github.com/hatchet-dev/hatchet/sdks/guides/go", "module github.com/hatchet-dev/hatchet/examples/go/guides") + go_ver = f"v{ver}" if not ver.startswith("v") else ver + content = content.replace("github.com/hatchet-dev/hatchet v0.0.0", f"github.com/hatchet-dev/hatchet {go_ver}") + content = re.sub(r"\nreplace github\.com/hatchet-dev/hatchet => \.\./\.\./\.\.\s*\n?", "\n", content) + elif lang == "python": + content = content.replace('hatchet-sdk = { path = "../../python", develop = true }', f'hatchet-sdk = "^{ver}"') + elif lang == "ruby": + content = content.replace( + 'gem "hatchet-sdk", path: "../../ruby/src"', + f'gem "hatchet-sdk", "~> {ver}"', + ) + elif lang == "typescript": + content = content.replace( + '"@hatchet-dev/typescript-sdk": "file:../../typescript"', + f'"@hatchet-dev/typescript-sdk": "^{ver}"', + ) + + with open(os.path.join(out_dir, filename), "w") as f: + f.write(content) + + def write_examples(examples: list[ProcessedExample]) -> None: for example in examples: out_path = os.path.join(ROOT, example.output_path) @@ -245,6 +381,12 @@ def write_examples(examples: list[ProcessedExample]) -> None: ) ) + # Copy dep files from sdks/guides/ to examples/*/guides/ with published SDK refs + copy_guide_dep_file("go", "go.mod") + copy_guide_dep_file("python", "pyproject.toml") + copy_guide_dep_file("ruby", "Gemfile") + copy_guide_dep_file("typescript", "package.json") + class JavaScriptObjectDecoder(json.JSONDecoder): def replacement(self, match: re.Match[str]) -> str: @@ -252,10 +394,15 @@ def replacement(self, match: re.Match[str]) -> str: key = 
match.group(2) return f'{indent}"{key}":' - def decode(self, raw: str) -> dict[str, Any]: + def decode(self, s: str, _w: Callable[..., Any] = re.compile(r"\s").match) -> Any: # type: ignore[override] pattern = r"^(\s*)([a-zA-Z_$][a-zA-Z0-9_$-]*)\s*:" - quoted = re.sub(pattern, self.replacement, raw) + quoted = re.sub(pattern, self.replacement, s) result = re.sub(pattern, self.replacement, quoted, flags=re.MULTILINE) + result = re.sub( + r"(\{\s*)([a-zA-Z_$][a-zA-Z0-9_$-]*)\s*:", + r'\1"\2":', + result, + ) result = re.sub(r",(\s*\n?\s*})(\s*);?", r"\1", result) return super().decode(result) @@ -302,7 +449,7 @@ def write_doc_index_to_app() -> None: for filename in glob.iglob(path, recursive=True): with open(filename) as f: - content = f.read().replace("export default ", "") + content = f.read().replace("export default ", "").strip().rstrip(";") parsed_meta = cast( dict[str, Any], json.loads(content, cls=JavaScriptObjectDecoder) ) @@ -351,7 +498,7 @@ def write_doc_index_to_app() -> None: if __name__ == "__main__": - processed_examples = process_examples() + processed_examples = process_examples() + process_guides() tree = create_snippet_tree(processed_examples) diff --git a/sdks/go/hatchet.go b/sdks/go/hatchet.go index 18a33eaf6d..c9af0b9c17 100644 --- a/sdks/go/hatchet.go +++ b/sdks/go/hatchet.go @@ -91,3 +91,19 @@ func OrCondition(conditions ...condition.Condition) condition.Condition { func AndCondition(conditions ...condition.Condition) condition.Condition { return condition.Conditions(conditions...) } + +// EventUnmarshaller is implemented by the result of DurableContext.WaitForEvent. +// Use EventInto to extract the event payload. +type EventUnmarshaller interface { + Unmarshal(dest any) error +} + +// EventInto extracts the event payload from a WaitForEvent result into dest. 
+// +// event, err := ctx.WaitForEvent("approval:decision", "") +// if err != nil { return err } +// var data map[string]interface{} +// if err := hatchet.EventInto(event, &data); err != nil { return err } +func EventInto(event EventUnmarshaller, dest any) error { + return event.Unmarshal(dest) +} diff --git a/sdks/guides/go/.golangci.yml b/sdks/guides/go/.golangci.yml new file mode 100644 index 0000000000..7ed3b80699 --- /dev/null +++ b/sdks/guides/go/.golangci.yml @@ -0,0 +1,28 @@ +version: "2" +# Guides module - extends root config, lint as standalone +# Lenient for doc examples: unused params in callbacks show API, unused helper funcs are for snippets +linters: + default: none + enable: + - errcheck + - gocritic + - gosec + - govet + - ineffassign + - revive + - staticcheck + - unconvert + - unused + exclusions: + paths: + - integrations/ # ocr_tesseract needs Tesseract C lib (native dep) + rules: + # Doc examples: callback signatures show API, mock/trigger funcs for snippets + - path: (.+)\.go$ + text: "unused-parameter:" + - path: (.+)\.go$ + text: "package-comments:" + - path: (.+)\.go$ + text: " is unused" + - path: (.+)\.go$ + text: "exitAfterDefer:" diff --git a/sdks/guides/go/ai-agents/ai-agents b/sdks/guides/go/ai-agents/ai-agents new file mode 100755 index 0000000000..01dd5edf90 Binary files /dev/null and b/sdks/guides/go/ai-agents/ai-agents differ diff --git a/sdks/guides/go/ai-agents/main.go b/sdks/guides/go/ai-agents/main.go new file mode 100644 index 0000000000..b85bd4b30f --- /dev/null +++ b/sdks/guides/go/ai-agents/main.go @@ -0,0 +1,75 @@ +package main + +import ( + "log" + + "github.com/hatchet-dev/hatchet/pkg/cmdutils" + hatchet "github.com/hatchet-dev/hatchet/sdks/go" +) + +func main() { + client, err := hatchet.NewClient() + if err != nil { + log.Fatalf("failed to create hatchet client: %v", err) + } + + // > Step 02 Reasoning Loop + agentReasoningLoop := func(query string) (map[string]interface{}, error) { + messages := 
[]map[string]interface{}{{"role": "user", "content": query}} + for i := 0; i < 10; i++ { + resp := CallLLM(messages) + if resp.Done { + return map[string]interface{}{"response": resp.Content}, nil + } + for _, tc := range resp.ToolCalls { + args := make(map[string]interface{}) + for k, v := range tc.Args { + args[k] = v + } + result := RunTool(tc.Name, args) + messages = append(messages, map[string]interface{}{"role": "tool", "content": result}) + } + } + return map[string]interface{}{"response": "Max iterations reached"}, nil + } + // !! + + // > Step 01 Define Agent Task + agentTask := client.NewStandaloneDurableTask("reasoning-loop-agent", func(ctx hatchet.DurableContext, input map[string]interface{}) (map[string]interface{}, error) { + query := "Hello" + if q, ok := input["query"].(string); ok && q != "" { + query = q + } + return agentReasoningLoop(query) + }) + // !! + + // > Step 03 Stream Response + streamingTask := client.NewStandaloneDurableTask("streaming-agent-task", func(ctx hatchet.DurableContext, input map[string]interface{}) (map[string]interface{}, error) { + tokens := []string{"Hello", " ", "world", "!"} + for _, t := range tokens { + ctx.PutStream(t) + } + return map[string]interface{}{"done": true}, nil + }) + // !! + + // > Step 04 Run Worker + worker, err := client.NewWorker("agent-worker", + hatchet.WithWorkflows(agentTask, streamingTask), + hatchet.WithSlots(5), + hatchet.WithDurableSlots(5), + ) + if err != nil { + log.Fatalf("failed to create worker: %v", err) + } + + interruptCtx, cancel := cmdutils.NewInterruptContext() + defer cancel() + + if err := worker.StartBlocking(interruptCtx); err != nil { + cancel() + log.Fatalf("failed to start worker: %v", err) + } + // !! 
+} diff --git a/sdks/guides/go/ai-agents/mock_agent.go b/sdks/guides/go/ai-agents/mock_agent.go new file mode 100644 index 0000000000..c80da2cc1b --- /dev/null +++ b/sdks/guides/go/ai-agents/mock_agent.go @@ -0,0 +1,40 @@ +package main + +// CallLLM is a mock - no external LLM API. +// First call returns tool_calls; second returns final answer. +var llmCallCount int + +type LLMResponse struct { + Content string + ToolCalls []ToolCall + Done bool +} + +type ToolCall struct { + Name string + Args map[string]interface{} +} + +func CallLLM(messages []map[string]interface{}) LLMResponse { + llmCallCount++ + if llmCallCount == 1 { + return LLMResponse{ + Content: "", + ToolCalls: []ToolCall{{Name: "get_weather", Args: map[string]interface{}{"location": "SF"}}}, + Done: false, + } + } + return LLMResponse{Content: "It's 72°F and sunny in SF.", ToolCalls: nil, Done: true} +} + +// RunTool is a mock - returns canned results. +func RunTool(name string, args map[string]interface{}) string { + if name == "get_weather" { + loc := "unknown" + if v, ok := args["location"]; ok { + loc = v.(string) + } + return "Weather in " + loc + ": 72°F, sunny" + } + return "Unknown tool: " + name +} diff --git a/sdks/guides/go/batch-processing/batch-processing b/sdks/guides/go/batch-processing/batch-processing new file mode 100755 index 0000000000..d0a78fe3f4 Binary files /dev/null and b/sdks/guides/go/batch-processing/batch-processing differ diff --git a/sdks/guides/go/batch-processing/main.go b/sdks/guides/go/batch-processing/main.go new file mode 100644 index 0000000000..3f38e2a7b8 --- /dev/null +++ b/sdks/guides/go/batch-processing/main.go @@ -0,0 +1,70 @@ +package main + +import ( + "log" + + "github.com/hatchet-dev/hatchet/pkg/cmdutils" + hatchet "github.com/hatchet-dev/hatchet/sdks/go" +) + +type BatchInput struct { + Items []string `json:"items"` +} + +type ItemInput struct { + ItemID string `json:"item_id"` +} + +func main() { + client, err := hatchet.NewClient() + if err != nil { + 
log.Fatalf("failed to create hatchet client: %v", err) + } + + // > Step 03 Process Item + childTask := client.NewStandaloneTask("process-item", func(ctx hatchet.Context, input ItemInput) (map[string]string, error) { + return map[string]string{"status": "done", "item_id": input.ItemID}, nil + }) + // !! + + // > Step 01 Define Parent Task + parentTask := client.NewStandaloneDurableTask("spawn-children", func(ctx hatchet.DurableContext, input BatchInput) (map[string]interface{}, error) { + inputs := make([]hatchet.RunManyOpt, len(input.Items)) + for i, itemID := range input.Items { + inputs[i] = hatchet.RunManyOpt{Input: ItemInput{ItemID: itemID}} + } + runRefs, err := childTask.RunMany(ctx, inputs) + if err != nil { + return nil, err + } + results := make([]interface{}, len(runRefs)) + for i, ref := range runRefs { + result, err := ref.Result() + if err != nil { + return nil, err + } + var parsed map[string]interface{} + if err := result.TaskOutput("process-item").Into(&parsed); err != nil { + return nil, err + } + results[i] = parsed + } + return map[string]interface{}{"processed": len(results), "results": results}, nil + }) + // !! + + // > Step 04 Run Worker + worker, err := client.NewWorker("batch-worker", hatchet.WithWorkflows(parentTask, childTask), hatchet.WithSlots(20)) + if err != nil { + log.Fatalf("failed to create worker: %v", err) + } + + interruptCtx, cancel := cmdutils.NewInterruptContext() + defer cancel() + + if err := worker.StartBlocking(interruptCtx); err != nil { + cancel() + log.Fatalf("failed to start worker: %v", err) + } + // !! 
+} diff --git a/sdks/guides/go/document-processing/main.go b/sdks/guides/go/document-processing/main.go new file mode 100644 index 0000000000..848d6e93ee --- /dev/null +++ b/sdks/guides/go/document-processing/main.go @@ -0,0 +1,67 @@ +package main + +import ( + "log" + + "github.com/hatchet-dev/hatchet/pkg/cmdutils" + hatchet "github.com/hatchet-dev/hatchet/sdks/go" +) + +type DocInput struct { + DocID string `json:"doc_id"` + Content []byte `json:"content"` +} + +func main() { + client, err := hatchet.NewClient() + if err != nil { + log.Fatalf("failed to create hatchet client: %v", err) + } + + // > Step 01 Define DAG + workflow := client.NewWorkflow("DocumentPipeline") + // !! + + // > Step 02 Parse Stage + ingest := workflow.NewTask("ingest", func(ctx hatchet.Context, input DocInput) (map[string]interface{}, error) { + return map[string]interface{}{"doc_id": input.DocID, "content": input.Content}, nil + }) + + parse := workflow.NewTask("parse", func(ctx hatchet.Context, input DocInput) (map[string]interface{}, error) { + var ingested map[string]interface{} + if err := ctx.ParentOutput(ingest, &ingested); err != nil { + return nil, err + } + content := ingested["content"].([]byte) + text := parseDocument(content) + return map[string]interface{}{"doc_id": input.DocID, "text": text}, nil + }, hatchet.WithParents(ingest)) + // !! + + // > Step 03 Extract Stage + extract := workflow.NewTask("extract", func(ctx hatchet.Context, input DocInput) (map[string]interface{}, error) { + var parsed map[string]interface{} + if err := ctx.ParentOutput(parse, &parsed); err != nil { + return nil, err + } + return map[string]interface{}{"doc_id": parsed["doc_id"], "entities": []string{"entity1", "entity2"}}, nil + }, hatchet.WithParents(parse)) + // !! 
+ + _ = extract + + // > Step 04 Run Worker + worker, err := client.NewWorker("document-worker", hatchet.WithWorkflows(workflow)) + if err != nil { + log.Fatalf("failed to create worker: %v", err) + } + + interruptCtx, cancel := cmdutils.NewInterruptContext() + defer cancel() + + if err := worker.StartBlocking(interruptCtx); err != nil { + cancel() + log.Fatalf("failed to start worker: %v", err) + } + // !! +} diff --git a/sdks/guides/go/document-processing/mock_ocr.go b/sdks/guides/go/document-processing/mock_ocr.go new file mode 100644 index 0000000000..f447473ab8 --- /dev/null +++ b/sdks/guides/go/document-processing/mock_ocr.go @@ -0,0 +1,8 @@ +package main + +import "fmt" + +// parseDocument is a mock - no external OCR dependency. +func parseDocument(content []byte) string { + return fmt.Sprintf("Parsed text from %d bytes", len(content)) +} diff --git a/sdks/guides/go/evaluator-optimizer/main.go b/sdks/guides/go/evaluator-optimizer/main.go new file mode 100644 index 0000000000..ce25bc35ef --- /dev/null +++ b/sdks/guides/go/evaluator-optimizer/main.go @@ -0,0 +1,111 @@ +package main + +import ( + "fmt" + "log" + + "github.com/hatchet-dev/hatchet/pkg/cmdutils" + hatchet "github.com/hatchet-dev/hatchet/sdks/go" +) + +type GeneratorInput struct { + Topic string `json:"topic"` + Audience string `json:"audience"` + PreviousDraft *string `json:"previous_draft,omitempty"` + Feedback *string `json:"feedback,omitempty"` +} + +type EvaluatorInput struct { + Draft string `json:"draft"` + Topic string `json:"topic"` + Audience string `json:"audience"` +} + +func main() { + client, err := hatchet.NewClient() + if err != nil { + log.Fatalf("failed to create hatchet client: %v", err) + } + + // > Step 01 Define Tasks + generatorTask := client.NewStandaloneTask("generate-draft", func(ctx hatchet.Context, input GeneratorInput) (map[string]interface{}, error) { + var prompt string + if input.Feedback != nil { + prompt = fmt.Sprintf("Improve this draft.\n\nDraft: %s\nFeedback: 
%s", *input.PreviousDraft, *input.Feedback) + } else { + prompt = fmt.Sprintf("Write a social media post about \"%s\" for %s. Under 100 words.", input.Topic, input.Audience) + } + return map[string]interface{}{"draft": MockGenerate(prompt)}, nil + }) + + evaluatorTask := client.NewStandaloneTask("evaluate-draft", func(ctx hatchet.Context, input EvaluatorInput) (map[string]interface{}, error) { + result := MockEvaluate(input.Draft) + return map[string]interface{}{"score": result.Score, "feedback": result.Feedback}, nil + }) + // !! + + // > Step 02 Optimization Loop + optimizerTask := client.NewStandaloneDurableTask("evaluator-optimizer", func(ctx hatchet.DurableContext, input map[string]interface{}) (map[string]interface{}, error) { + maxIterations := 3 + threshold := 0.8 + draft := "" + feedback := "" + topic := input["topic"].(string) + audience := input["audience"].(string) + + for i := 0; i < maxIterations; i++ { + genInput := GeneratorInput{Topic: topic, Audience: audience} + if draft != "" { + genInput.PreviousDraft = &draft + } + if feedback != "" { + genInput.Feedback = &feedback + } + genResult, err := generatorTask.Run(ctx, genInput) + if err != nil { + return nil, err + } + var genData map[string]interface{} + if err := genResult.Into(&genData); err != nil { + return nil, err + } + draft = genData["draft"].(string) + + evalResult, err := evaluatorTask.Run(ctx, EvaluatorInput{Draft: draft, Topic: topic, Audience: audience}) + if err != nil { + return nil, err + } + var evalData map[string]interface{} + if err := evalResult.Into(&evalData); err != nil { + return nil, err + } + + score := evalData["score"].(float64) + if score >= threshold { + return map[string]interface{}{"draft": draft, "iterations": i + 1, "score": score}, nil + } + feedback = evalData["feedback"].(string) + } + + return map[string]interface{}{"draft": draft, "iterations": maxIterations, "score": -1}, nil + }) + // !! 
+ + // > Step 03 Run Worker + worker, err := client.NewWorker("evaluator-optimizer-worker", + hatchet.WithWorkflows(generatorTask, evaluatorTask, optimizerTask), + hatchet.WithSlots(5), + hatchet.WithDurableSlots(5), + ) + if err != nil { + log.Fatalf("failed to create worker: %v", err) + } + + interruptCtx, cancel := cmdutils.NewInterruptContext() + defer cancel() + + if err := worker.StartBlocking(interruptCtx); err != nil { + log.Fatalf("failed to start worker: %v", err) + } + // !! +} diff --git a/sdks/guides/go/evaluator-optimizer/mock_llm.go b/sdks/guides/go/evaluator-optimizer/mock_llm.go new file mode 100644 index 0000000000..92028718d4 --- /dev/null +++ b/sdks/guides/go/evaluator-optimizer/mock_llm.go @@ -0,0 +1,23 @@ +package main + +var generateCount int + +func MockGenerate(prompt string) string { + generateCount++ + if generateCount == 1 { + return "Check out our product! Buy now!" + } + return "Discover how our tool saves teams 10 hours/week. Try it free." +} + +type EvalResult struct { + Score float64 + Feedback string +} + +func MockEvaluate(draft string) EvalResult { + if len(draft) < 40 { + return EvalResult{Score: 0.4, Feedback: "Too short and pushy. 
Add a specific benefit and soften the CTA."} + } + return EvalResult{Score: 0.9, Feedback: "Clear value prop, appropriate tone."} +} diff --git a/sdks/guides/go/event-driven/main.go b/sdks/guides/go/event-driven/main.go new file mode 100644 index 0000000000..fe012dae99 --- /dev/null +++ b/sdks/guides/go/event-driven/main.go @@ -0,0 +1,56 @@ +package main + +import ( + "context" + "log" + + "github.com/hatchet-dev/hatchet/pkg/cmdutils" + hatchet "github.com/hatchet-dev/hatchet/sdks/go" +) + +type EventInput struct { + Message string `json:"message"` + Source string `json:"source"` +} + +func main() { + client, err := hatchet.NewClient() + if err != nil { + log.Fatalf("failed to create hatchet client: %v", err) + } + + // > Step 01 Define Event Task + task := client.NewStandaloneTask("process-event", func(ctx hatchet.Context, input EventInput) (map[string]string, error) { + source := input.Source + if source == "" { + source = "api" + } + return map[string]string{"processed": input.Message, "source": source}, nil + }, hatchet.WithWorkflowEvents("order:created", "user:signup")) + // !! + + // > Step 02 Register Event Trigger + // Push an event from your app. Call this from your webhook handler or API. + pushEvent := func() { + _ = client.Events().Push(context.Background(), "order:created", map[string]interface{}{ + "message": "Order #1234", + "source": "webhook", + }) + } + _ = pushEvent + // !! + + // > Step 04 Run Worker + worker, err := client.NewWorker("event-driven-worker", hatchet.WithWorkflows(task)) + if err != nil { + log.Fatalf("failed to create worker: %v", err) + } + + interruptCtx, cancel := cmdutils.NewInterruptContext() + defer cancel() + + if err := worker.StartBlocking(interruptCtx); err != nil { + log.Fatalf("failed to start worker: %v", err) + } + // !! 
+} diff --git a/sdks/guides/go/event-driven/trigger.go b/sdks/guides/go/event-driven/trigger.go new file mode 100644 index 0000000000..3d5b7cc0fb --- /dev/null +++ b/sdks/guides/go/event-driven/trigger.go @@ -0,0 +1,18 @@ +package main + +import ( + "context" + + hatchet "github.com/hatchet-dev/hatchet/sdks/go" +) + +// > Step 03 Push Event +// Push an event to trigger the workflow. Use the same key as WithWorkflowEvents. +func pushEvent(client *hatchet.Client) { + _ = client.Events().Push(context.Background(), "order:created", map[string]interface{}{ + "message": "Order #1234", + "source": "webhook", + }) +} + +// !! diff --git a/sdks/guides/go/go.mod b/sdks/guides/go/go.mod new file mode 100644 index 0000000000..1bbc4dc82e --- /dev/null +++ b/sdks/guides/go/go.mod @@ -0,0 +1,81 @@ +module github.com/hatchet-dev/hatchet/sdks/guides/go + +go 1.25.0 + +require ( + github.com/hatchet-dev/hatchet v0.0.0 + github.com/sashabaranov/go-openai v1.28.0 +) + +require ( + cel.dev/expr v0.25.1 // indirect + github.com/Masterminds/semver/v3 v3.4.0 // indirect + github.com/antlr4-go/antlr/v4 v4.13.1 // indirect + github.com/apapsch/go-jsonmerge/v2 v2.0.0 // indirect + github.com/cockroachdb/errors v1.12.0 // indirect + github.com/cockroachdb/logtags v0.0.0-20230118201751-21c54148d20b // indirect + github.com/cockroachdb/redact v1.1.5 // indirect + github.com/creasty/defaults v1.8.0 // indirect + github.com/fsnotify/fsnotify v1.9.0 // indirect + github.com/gabriel-vasile/mimetype v1.4.12 // indirect + github.com/getkin/kin-openapi v0.133.0 // indirect + github.com/getsentry/sentry-go v0.43.0 // indirect + github.com/go-openapi/jsonpointer v0.21.0 // indirect + github.com/go-openapi/swag v0.23.0 // indirect + github.com/go-playground/locales v0.14.1 // indirect + github.com/go-playground/universal-translator v0.18.1 // indirect + github.com/go-playground/validator/v10 v10.30.1 // indirect + github.com/go-viper/mapstructure/v2 v2.4.0 // indirect + github.com/gogo/protobuf v1.3.2 // 
indirect + github.com/google/cel-go v0.27.0 // indirect + github.com/google/uuid v1.6.0 // indirect + github.com/grpc-ecosystem/go-grpc-middleware/v2 v2.3.3 // indirect + github.com/hashicorp/errwrap v1.1.0 // indirect + github.com/hashicorp/go-multierror v1.1.1 // indirect + github.com/jackc/pgpassfile v1.0.0 // indirect + github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 // indirect + github.com/jackc/pgx/v5 v5.8.0 // indirect + github.com/josharian/intern v1.0.0 // indirect + github.com/kr/pretty v0.3.1 // indirect + github.com/kr/text v0.2.0 // indirect + github.com/labstack/echo/v4 v4.15.1 // indirect + github.com/labstack/gommon v0.4.2 // indirect + github.com/leodido/go-urn v1.4.0 // indirect + github.com/mailru/easyjson v0.7.7 // indirect + github.com/mattn/go-colorable v0.1.14 // indirect + github.com/mattn/go-isatty v0.0.20 // indirect + github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826 // indirect + github.com/oapi-codegen/runtime v1.2.0 // indirect + github.com/oasdiff/yaml v0.0.0-20250309154309-f31be36b4037 // indirect + github.com/oasdiff/yaml3 v0.0.0-20250309153720-d2182401db90 // indirect + github.com/pelletier/go-toml/v2 v2.2.4 // indirect + github.com/perimeterx/marshmallow v1.1.5 // indirect + github.com/pkg/errors v0.9.1 // indirect + github.com/robfig/cron/v3 v3.0.1 // indirect + github.com/rogpeppe/go-internal v1.14.1 // indirect + github.com/rs/zerolog v1.34.0 // indirect + github.com/sagikazarmark/locafero v0.11.0 // indirect + github.com/sourcegraph/conc v0.3.1-0.20240121214520-5f936abd7ae8 // indirect + github.com/spf13/afero v1.15.0 // indirect + github.com/spf13/cast v1.10.0 // indirect + github.com/spf13/pflag v1.0.10 // indirect + github.com/spf13/viper v1.21.0 // indirect + github.com/subosito/gotenv v1.6.0 // indirect + github.com/valyala/bytebufferpool v1.0.0 // indirect + github.com/valyala/fasttemplate v1.2.2 // indirect + github.com/woodsbury/decimal128 v1.3.0 // indirect + go.yaml.in/yaml/v3 v3.0.4 // 
indirect + golang.org/x/crypto v0.48.0 // indirect + golang.org/x/exp v0.0.0-20260218203240-3dfff04db8fa // indirect + golang.org/x/net v0.50.0 // indirect + golang.org/x/sync v0.19.0 // indirect + golang.org/x/sys v0.41.0 // indirect + golang.org/x/text v0.34.0 // indirect + google.golang.org/genproto/googleapis/api v0.0.0-20260209200024-4cfbd4190f57 // indirect + google.golang.org/genproto/googleapis/rpc v0.0.0-20260217215200-42d3e9bedb6d // indirect + google.golang.org/grpc v1.79.1 // indirect + google.golang.org/protobuf v1.36.11 // indirect + gopkg.in/yaml.v3 v3.0.1 // indirect +) + +replace github.com/hatchet-dev/hatchet => ../../.. diff --git a/sdks/guides/go/go.sum b/sdks/guides/go/go.sum new file mode 100644 index 0000000000..c2e139771b --- /dev/null +++ b/sdks/guides/go/go.sum @@ -0,0 +1,259 @@ +cel.dev/expr v0.25.1 h1:1KrZg61W6TWSxuNZ37Xy49ps13NUovb66QLprthtwi4= +cel.dev/expr v0.25.1/go.mod h1:hrXvqGP6G6gyx8UAHSHJ5RGk//1Oj5nXQ2NI02Nrsg4= +github.com/Masterminds/semver/v3 v3.4.0 h1:Zog+i5UMtVoCU8oKka5P7i9q9HgrJeGzI9SA1Xbatp0= +github.com/Masterminds/semver/v3 v3.4.0/go.mod h1:4V+yj/TJE1HU9XfppCwVMZq3I84lprf4nC11bSS5beM= +github.com/RaveNoX/go-jsoncommentstrip v1.0.0/go.mod h1:78ihd09MekBnJnxpICcwzCMzGrKSKYe4AqU6PDYYpjk= +github.com/antlr4-go/antlr/v4 v4.13.1 h1:SqQKkuVZ+zWkMMNkjy5FZe5mr5WURWnlpmOuzYWrPrQ= +github.com/antlr4-go/antlr/v4 v4.13.1/go.mod h1:GKmUxMtwp6ZgGwZSva4eWPC5mS6vUAmOABFgjdkM7Nw= +github.com/apapsch/go-jsonmerge/v2 v2.0.0 h1:axGnT1gRIfimI7gJifB699GoE/oq+F2MU7Dml6nw9rQ= +github.com/apapsch/go-jsonmerge/v2 v2.0.0/go.mod h1:lvDnEdqiQrp0O42VQGgmlKpxL1AP2+08jFMw88y4klk= +github.com/bmatcuk/doublestar v1.1.1/go.mod h1:UD6OnuiIn0yFxxA2le/rnRU1G4RaI4UvFv1sNto9p6w= +github.com/cenkalti/backoff/v5 v5.0.3 h1:ZN+IMa753KfX5hd8vVaMixjnqRZ3y8CuJKRKj1xcsSM= +github.com/cenkalti/backoff/v5 v5.0.3/go.mod h1:rkhZdG3JZukswDf7f0cwqPNk4K0sa+F97BxZthm/crw= +github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs= 
+github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/cockroachdb/errors v1.12.0 h1:d7oCs6vuIMUQRVbi6jWWWEJZahLCfJpnJSVobd1/sUo= +github.com/cockroachdb/errors v1.12.0/go.mod h1:SvzfYNNBshAVbZ8wzNc/UPK3w1vf0dKDUP41ucAIf7g= +github.com/cockroachdb/logtags v0.0.0-20230118201751-21c54148d20b h1:r6VH0faHjZeQy818SGhaone5OnYfxFR/+AzdY3sf5aE= +github.com/cockroachdb/logtags v0.0.0-20230118201751-21c54148d20b/go.mod h1:Vz9DsVWQQhf3vs21MhPMZpMGSht7O/2vFW2xusFUVOs= +github.com/cockroachdb/redact v1.1.5 h1:u1PMllDkdFfPWaNGMyLD1+so+aq3uUItthCFqzwPJ30= +github.com/cockroachdb/redact v1.1.5/go.mod h1:BVNblN9mBWFyMyqK1k3AAiSxhvhfK2oOZZ2lK+dpvRg= +github.com/coreos/go-systemd/v22 v22.5.0/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= +github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= +github.com/creasty/defaults v1.8.0 h1:z27FJxCAa0JKt3utc0sCImAEb+spPucmKoOdLHvHYKk= +github.com/creasty/defaults v1.8.0/go.mod h1:iGzKe6pbEHnpMPtfDXZEr0NVxWnPTjb1bbDy08fPzYM= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM= +github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/frankban/quicktest v1.14.6 h1:7Xjx+VpznH+oBnejlPUj8oUpdxnVs4f8XU8WnHkI4W8= +github.com/frankban/quicktest v1.14.6/go.mod h1:4ptaffx2x8+WTWXmUCuVU6aPUX1/Mz7zb5vbUoiM6w0= +github.com/fsnotify/fsnotify v1.9.0 h1:2Ml+OJNzbYCTzsxtv8vKSFD9PbJjmhYF14k/jKC7S9k= +github.com/fsnotify/fsnotify v1.9.0/go.mod h1:8jBTzvmWwFyi3Pb8djgCCO5IBqzKJ/Jwo8TRcHyHii0= +github.com/gabriel-vasile/mimetype v1.4.12 h1:e9hWvmLYvtp846tLHam2o++qitpguFiYCKbn0w9jyqw= +github.com/gabriel-vasile/mimetype v1.4.12/go.mod h1:d+9Oxyo1wTzWdyVUPMmXFvp4F9tea18J8ufA774AB3s= +github.com/getkin/kin-openapi v0.133.0 
h1:pJdmNohVIJ97r4AUFtEXRXwESr8b0bD721u/Tz6k8PQ= +github.com/getkin/kin-openapi v0.133.0/go.mod h1:boAciF6cXk5FhPqe/NQeBTeenbjqU4LhWBf09ILVvWE= +github.com/getsentry/sentry-go v0.43.0 h1:XbXLpFicpo8HmBDaInk7dum18G9KSLcjZiyUKS+hLW4= +github.com/getsentry/sentry-go v0.43.0/go.mod h1:XDotiNZbgf5U8bPDUAfvcFmOnMQQceESxyKaObSssW0= +github.com/go-errors/errors v1.4.2 h1:J6MZopCL4uSllY1OfXM374weqZFFItUbrImctkmUxIA= +github.com/go-errors/errors v1.4.2/go.mod h1:sIVyrIiJhuEF+Pj9Ebtd6P/rEYROXFi3BopGUQ5a5Og= +github.com/go-logr/logr v1.4.3 h1:CjnDlHq8ikf6E492q6eKboGOC0T8CDaOvkHCIg8idEI= +github.com/go-logr/logr v1.4.3/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= +github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= +github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= +github.com/go-openapi/jsonpointer v0.21.0 h1:YgdVicSA9vH5RiHs9TZW5oyafXZFc6+2Vc1rr/O9oNQ= +github.com/go-openapi/jsonpointer v0.21.0/go.mod h1:IUyH9l/+uyhIYQ/PXVA41Rexl+kOkAPDdXEYns6fzUY= +github.com/go-openapi/swag v0.23.0 h1:vsEVJDUo2hPJ2tu0/Xc+4noaxyEffXNIs3cOULZ+GrE= +github.com/go-openapi/swag v0.23.0/go.mod h1:esZ8ITTYEsH1V2trKHjAN8Ai7xHb8RV+YSZ577vPjgQ= +github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s= +github.com/go-playground/assert/v2 v2.2.0/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4= +github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA= +github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY= +github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY= +github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY= +github.com/go-playground/validator/v10 v10.30.1 h1:f3zDSN/zOma+w6+1Wswgd9fLkdwy06ntQJp0BBvFG0w= +github.com/go-playground/validator/v10 v10.30.1/go.mod h1:oSuBIQzuJxL//3MelwSLD5hc2Tu889bF0Idm9Dg26cM= 
+github.com/go-test/deep v1.0.8 h1:TDsG77qcSprGbC6vTN8OuXp5g+J+b5Pcguhf7Zt61VM= +github.com/go-test/deep v1.0.8/go.mod h1:5C2ZWiW0ErCdrYzpqxLbTX7MG14M9iiw8DgHncVwcsE= +github.com/go-viper/mapstructure/v2 v2.4.0 h1:EBsztssimR/CONLSZZ04E8qAkxNYq4Qp9LvH92wZUgs= +github.com/go-viper/mapstructure/v2 v2.4.0/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM= +github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= +github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= +github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= +github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek= +github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps= +github.com/google/cel-go v0.27.0 h1:e7ih85+4qVrBuqQWTW4FKSqZYokVuc3HnhH5keboFTo= +github.com/google/cel-go v0.27.0/go.mod h1:tTJ11FWqnhw5KKpnWpvW9CJC3Y9GK4EIS0WXnBbebzw= +github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8= +github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU= +github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= +github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/grpc-ecosystem/go-grpc-middleware/v2 v2.3.3 h1:B+8ClL/kCQkRiU82d9xajRPKYMrB7E0MbtzWVi1K4ns= +github.com/grpc-ecosystem/go-grpc-middleware/v2 v2.3.3/go.mod h1:NbCUVmiS4foBGBHOYlCT25+YmGpJ32dZPi75pGEUpj4= +github.com/grpc-ecosystem/grpc-gateway/v2 v2.28.0 h1:HWRh5R2+9EifMyIHV7ZV+MIZqgz+PMpZ14Jynv3O2Zs= +github.com/grpc-ecosystem/grpc-gateway/v2 v2.28.0/go.mod h1:JfhWUomR1baixubs02l85lZYYOm7LV6om4ceouMv45c= +github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= +github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY2I= +github.com/hashicorp/errwrap v1.1.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= 
+github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo= +github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM= +github.com/hashicorp/golang-lru/v2 v2.0.7 h1:a+bsQ5rvGLjzHuww6tVxozPZFVghXaHOwFs4luLUK2k= +github.com/hashicorp/golang-lru/v2 v2.0.7/go.mod h1:QeFd9opnmA6QUJc5vARoKUSoFhyfM2/ZepoAG6RGpeM= +github.com/jackc/pgpassfile v1.0.0 h1:/6Hmqy13Ss2zCq62VdNG8tM1wchn8zjSGOBJ6icpsIM= +github.com/jackc/pgpassfile v1.0.0/go.mod h1:CEx0iS5ambNFdcRtxPj5JhEz+xB6uRky5eyVu/W2HEg= +github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 h1:iCEnooe7UlwOQYpKFhBabPMi4aNAfoODPEFNiAnClxo= +github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761/go.mod h1:5TJZWKEWniPve33vlWYSoGYefn3gLQRzjfDlhSJ9ZKM= +github.com/jackc/pgx/v5 v5.8.0 h1:TYPDoleBBme0xGSAX3/+NujXXtpZn9HBONkQC7IEZSo= +github.com/jackc/pgx/v5 v5.8.0/go.mod h1:QVeDInX2m9VyzvNeiCJVjCkNFqzsNb43204HshNSZKw= +github.com/jackc/pgxlisten v0.0.0-20241106001234-1d6f6656415c h1:bTgmg761ac9Ki27HoLx8IBvc+T+Qj6eptBpKahKIRT4= +github.com/jackc/pgxlisten v0.0.0-20241106001234-1d6f6656415c/go.mod h1:N4E1APLOYrbM11HH5kdqAjDa8RJWVwD3JqWpvH22h64= +github.com/jackc/puddle/v2 v2.2.2 h1:PR8nw+E/1w0GLuRFSmiioY6UooMp6KJv0/61nB7icHo= +github.com/jackc/puddle/v2 v2.2.2/go.mod h1:vriiEXHvEE654aYKXXjOvZM39qJ0q+azkZFrfEOc3H4= +github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY= +github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y= +github.com/juju/gnuflag v0.0.0-20171113085948-2ce1bb71843d/go.mod h1:2PavIy+JPciBPrBUjwbNvtwB6RQlve+hkpll6QSNmOE= +github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= +github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= +github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= +github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= 
+github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= +github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/labstack/echo/v4 v4.15.1 h1:S9keusg26gZpjMmPqB5hOEvNKnmd1lNmcHrbbH2lnFs= +github.com/labstack/echo/v4 v4.15.1/go.mod h1:xmw1clThob0BSVRX1CRQkGQ/vjwcpOMjQZSZa9fKA/c= +github.com/labstack/gommon v0.4.2 h1:F8qTUNXgG1+6WQmqoUWnz8WiEU60mXVVw0P4ht1WRA0= +github.com/labstack/gommon v0.4.2/go.mod h1:QlUFxVM+SNXhDL/Z7YhocGIBYOiwB0mXm1+1bAPHPyU= +github.com/leodido/go-urn v1.4.0 h1:WT9HwE9SGECu3lg4d/dIA+jxlljEa1/ffXKmRjqdmIQ= +github.com/leodido/go-urn v1.4.0/go.mod h1:bvxc+MVxLKB4z00jd1z+Dvzr47oO32F/QSNjSBOlFxI= +github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0= +github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc= +github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= +github.com/mattn/go-colorable v0.1.14 h1:9A9LHSqF/7dyVVX6g0U9cwm9pG3kP9gSzcuIPHPsaIE= +github.com/mattn/go-colorable v0.1.14/go.mod h1:6LmQG8QLFO4G5z1gPvYEzlUgJ2wF+stgPZH1UqBm1s8= +github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= +github.com/mattn/go-isatty v0.0.19/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= +github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= +github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= +github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY= +github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= +github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826 h1:RWengNIwukTxcDr9M+97sNutRR1RKhG96O6jWumTTnw= +github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826/go.mod h1:TaXosZuwdSHYgviHp1DAtfrULt5eUgsSMsZf+YrPgl8= +github.com/oapi-codegen/runtime v1.2.0 h1:RvKc1CVS1QeKSNzO97FBQbSMZyQ8s6rZd+LpmzwHMP4= 
+github.com/oapi-codegen/runtime v1.2.0/go.mod h1:Y7ZhmmlE8ikZOmuHRRndiIm7nf3xcVv+YMweKgG1DT0= +github.com/oasdiff/yaml v0.0.0-20250309154309-f31be36b4037 h1:G7ERwszslrBzRxj//JalHPu/3yz+De2J+4aLtSRlHiY= +github.com/oasdiff/yaml v0.0.0-20250309154309-f31be36b4037/go.mod h1:2bpvgLBZEtENV5scfDFEtB/5+1M4hkQhDQrccEJ/qGw= +github.com/oasdiff/yaml3 v0.0.0-20250309153720-d2182401db90 h1:bQx3WeLcUWy+RletIKwUIt4x3t8n2SxavmoclizMb8c= +github.com/oasdiff/yaml3 v0.0.0-20250309153720-d2182401db90/go.mod h1:y5+oSEHCPT/DGrS++Wc/479ERge0zTFxaF8PbGKcg2o= +github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U= +github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM= +github.com/pelletier/go-toml/v2 v2.2.4 h1:mye9XuhQ6gvn5h28+VilKrrPoQVanw5PMw/TB0t5Ec4= +github.com/pelletier/go-toml/v2 v2.2.4/go.mod h1:2gIqNv+qfxSVS7cM2xJQKtLSTLUE9V8t9Stt+h56mCY= +github.com/perimeterx/marshmallow v1.1.5 h1:a2LALqQ1BlHM8PZblsDdidgv1mWi1DgC2UmX50IvK2s= +github.com/perimeterx/marshmallow v1.1.5/go.mod h1:dsXbUu8CRzfYP5a87xpp0xq9S3u0Vchtcl8we9tYaXw= +github.com/pingcap/errors v0.11.4 h1:lFuQV/oaUMGcD2tqt+01ROSmJs75VG1ToEOkZIZ4nE4= +github.com/pingcap/errors v0.11.4/go.mod h1:Oi8TUi2kEtXXLMJk9l1cGmz20kV3TaQ0usTwv5KuLY8= +github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA= +github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= +github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U= +github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/robfig/cron/v3 v3.0.1 h1:WdRxkvbJztn8LMz/QEvLN5sBU+xKpSqwwUO1Pjr4qDs= +github.com/robfig/cron/v3 
v3.0.1/go.mod h1:eQICP3HwyT7UooqI/z+Ov+PtYAWygg1TEWWzGIFLtro= +github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs= +github.com/rogpeppe/go-internal v1.14.1 h1:UQB4HGPB6osV0SQTLymcB4TgvyWu6ZyliaW0tI/otEQ= +github.com/rogpeppe/go-internal v1.14.1/go.mod h1:MaRKkUm5W0goXpeCfT7UZI6fk/L7L7so1lCWt35ZSgc= +github.com/rs/xid v1.6.0/go.mod h1:7XoLgs4eV+QndskICGsho+ADou8ySMSjJKDIan90Nz0= +github.com/rs/zerolog v1.34.0 h1:k43nTLIwcTVQAncfCw4KZ2VY6ukYoZaBPNOE8txlOeY= +github.com/rs/zerolog v1.34.0/go.mod h1:bJsvje4Z08ROH4Nhs5iH600c3IkWhwp44iRc54W6wYQ= +github.com/sagikazarmark/locafero v0.11.0 h1:1iurJgmM9G3PA/I+wWYIOw/5SyBtxapeHDcg+AAIFXc= +github.com/sagikazarmark/locafero v0.11.0/go.mod h1:nVIGvgyzw595SUSUE6tvCp3YYTeHs15MvlmU87WwIik= +github.com/sashabaranov/go-openai v1.28.0 h1:WS9F9BriSvtHvknPQy2Oi3b+8zkmJdEXcycrWqrSicQ= +github.com/sashabaranov/go-openai v1.28.0/go.mod h1:lj5b/K+zjTSFxVLijLSTDZuP7adOgerWeFyZLUhAKRg= +github.com/sourcegraph/conc v0.3.1-0.20240121214520-5f936abd7ae8 h1:+jumHNA0Wrelhe64i8F6HNlS8pkoyMv5sreGx2Ry5Rw= +github.com/sourcegraph/conc v0.3.1-0.20240121214520-5f936abd7ae8/go.mod h1:3n1Cwaq1E1/1lhQhtRK2ts/ZwZEhjcQeJQ1RuC6Q/8U= +github.com/spf13/afero v1.15.0 h1:b/YBCLWAJdFWJTN9cLhiXXcD7mzKn9Dm86dNnfyQw1I= +github.com/spf13/afero v1.15.0/go.mod h1:NC2ByUVxtQs4b3sIUphxK0NioZnmxgyCrfzeuq8lxMg= +github.com/spf13/cast v1.10.0 h1:h2x0u2shc1QuLHfxi+cTJvs30+ZAHOGRic8uyGTDWxY= +github.com/spf13/cast v1.10.0/go.mod h1:jNfB8QC9IA6ZuY2ZjDp0KtFO2LZZlg4S/7bzP6qqeHo= +github.com/spf13/pflag v1.0.10 h1:4EBh2KAYBwaONj6b2Ye1GiHfwjqyROoF4RwYO+vPwFk= +github.com/spf13/pflag v1.0.10/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= +github.com/spf13/viper v1.21.0 h1:x5S+0EU27Lbphp4UKm1C+1oQO+rKx36vfCoaVebLFSU= +github.com/spf13/viper v1.21.0/go.mod h1:P0lhsswPGWD/1lZJ9ny3fYnVqxiegrlNrEmgLjbTCAY= +github.com/spkg/bom v0.0.0-20160624110644-59b7046e48ad/go.mod h1:qLr4V1qq6nMqFKkMo8ZTx3f+BZEkzsRUY10Xsm2mwU0= 
+github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= +github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U= +github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U= +github.com/subosito/gotenv v1.6.0 h1:9NlTDc1FTs4qu0DDq7AEtTPNw6SVm7uBMsUCUjABIf8= +github.com/subosito/gotenv v1.6.0/go.mod h1:Dk4QP5c2W3ibzajGcXpNraDfq2IrhjMIvMSWPKKo0FU= +github.com/ugorji/go/codec v1.2.12 h1:9LC83zGrHhuUA9l16C9AHXAqEV/2wBQ4nkvumAE65EE= +github.com/ugorji/go/codec v1.2.12/go.mod h1:UNopzCgEMSXjBc6AOMqYvWC1ktqTAfzJZUZgYf6w6lg= +github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw= +github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc= +github.com/valyala/fasttemplate v1.2.2 h1:lxLXG0uE3Qnshl9QyaK6XJxMXlQZELvChBOCmQD0Loo= +github.com/valyala/fasttemplate v1.2.2/go.mod h1:KHLXt3tVN2HBp8eijSv/kGJopbvo7S+qRAEEKiv+SiQ= +github.com/woodsbury/decimal128 v1.3.0 h1:8pffMNWIlC0O5vbyHWFZAt5yWvWcrHA+3ovIIjVWss0= +github.com/woodsbury/decimal128 v1.3.0/go.mod h1:C5UTmyTjW3JftjUFzOVhC20BEQa2a4ZKOB5I6Zjb+ds= +github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +go.opentelemetry.io/auto/sdk v1.2.1 h1:jXsnJ4Lmnqd11kwkBV2LgLoFMZKizbCi5fNZ/ipaZ64= +go.opentelemetry.io/auto/sdk v1.2.1/go.mod h1:KRTj+aOaElaLi+wW1kO/DZRXwkF4C5xPbEe3ZiIhN7Y= +go.opentelemetry.io/otel v1.41.0 h1:YlEwVsGAlCvczDILpUXpIpPSL/VPugt7zHThEMLce1c= +go.opentelemetry.io/otel v1.41.0/go.mod h1:Yt4UwgEKeT05QbLwbyHXEwhnjxNO6D8L5PQP51/46dE= +go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v1.41.0 h1:VO3BL6OZXRQ1yQc8W6EVfJzINeJ35BkiHx4MYfoQf44= 
+go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v1.41.0/go.mod h1:qRDnJ2nv3CQXMK2HUd9K9VtvedsPAce3S+/4LZHjX/s= +go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.41.0 h1:ao6Oe+wSebTlQ1OEht7jlYTzQKE+pnx/iNywFvTbuuI= +go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.41.0/go.mod h1:u3T6vz0gh/NVzgDgiwkgLxpsSF6PaPmo2il0apGJbls= +go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.41.0 h1:mq/Qcf28TWz719lE3/hMB4KkyDuLJIvgJnFGcd0kEUI= +go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.41.0/go.mod h1:yk5LXEYhsL2htyDNJbEq7fWzNEigeEdV5xBF/Y+kAv0= +go.opentelemetry.io/otel/metric v1.41.0 h1:rFnDcs4gRzBcsO9tS8LCpgR0dxg4aaxWlJxCno7JlTQ= +go.opentelemetry.io/otel/metric v1.41.0/go.mod h1:xPvCwd9pU0VN8tPZYzDZV/BMj9CM9vs00GuBjeKhJps= +go.opentelemetry.io/otel/sdk v1.41.0 h1:YPIEXKmiAwkGl3Gu1huk1aYWwtpRLeskpV+wPisxBp8= +go.opentelemetry.io/otel/sdk v1.41.0/go.mod h1:ahFdU0G5y8IxglBf0QBJXgSe7agzjE4GiTJ6HT9ud90= +go.opentelemetry.io/otel/sdk/metric v1.41.0 h1:siZQIYBAUd1rlIWQT2uCxWJxcCO7q3TriaMlf08rXw8= +go.opentelemetry.io/otel/sdk/metric v1.41.0/go.mod h1:HNBuSvT7ROaGtGI50ArdRLUnvRTRGniSUZbxiWxSO8Y= +go.opentelemetry.io/otel/trace v1.41.0 h1:Vbk2co6bhj8L59ZJ6/xFTskY+tGAbOnCtQGVVa9TIN0= +go.opentelemetry.io/otel/trace v1.41.0/go.mod h1:U1NU4ULCoxeDKc09yCWdWe+3QoyweJcISEVa1RBzOis= +go.opentelemetry.io/proto/otlp v1.9.0 h1:l706jCMITVouPOqEnii2fIAuO3IVGBRPV5ICjceRb/A= +go.opentelemetry.io/proto/otlp v1.9.0/go.mod h1:xE+Cx5E/eEHw+ISFkwPLwCZefwVjY+pqKg1qcK03+/4= +go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto= +go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE= +go.yaml.in/yaml/v3 v3.0.4 h1:tfq32ie2Jv2UxXFdLJdh3jXuOzWiL1fo0bu/FbuKpbc= +go.yaml.in/yaml/v3 v3.0.4/go.mod h1:DhzuOOF2ATzADvBadXxruRBLzYTpT36CKvDb3+aBEFg= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto 
v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.48.0 h1:/VRzVqiRSggnhY7gNRxPauEQ5Drw9haKdM0jqfcCFts= +golang.org/x/crypto v0.48.0/go.mod h1:r0kV5h3qnFPlQnBSrULhlsRfryS2pmewsg+XfMgkVos= +golang.org/x/exp v0.0.0-20260218203240-3dfff04db8fa h1:Zt3DZoOFFYkKhDT3v7Lm9FDMEV06GpzjG2jrqW+QTE0= +golang.org/x/exp v0.0.0-20260218203240-3dfff04db8fa/go.mod h1:K79w1Vqn7PoiZn+TkNpx3BUWUQksGO3JcVX6qIjytmA= +golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.50.0 h1:ucWh9eiCGyDR3vtzso0WMQinm2Dnt8cFMuQa9K33J60= +golang.org/x/net v0.50.0/go.mod h1:UgoSli3F/pBgdJBHCTc+tp3gmrU4XswgGRgtnwWTfyM= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.19.0 h1:vV+1eWNmZ5geRlYjzm2adRgW2/mcpevXNg50YZtPCE4= +golang.org/x/sync v0.19.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod 
h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.41.0 h1:Ivj+2Cp/ylzLiEU89QhWblYnOE9zerudt9Ftecq2C6k= +golang.org/x/sys v0.41.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.34.0 h1:oL/Qq0Kdaqxa1KbNeMKwQq0reLCCaFtqu2eNuSeNHbk= +golang.org/x/text v0.34.0/go.mod h1:homfLqTYRFyVYemLBFl5GgL/DWEiH5wcsQ5gSh1yziA= +golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +gonum.org/v1/gonum v0.16.0 h1:5+ul4Swaf3ESvrOnidPp4GZbzf0mxVQpDCYUQE7OJfk= +gonum.org/v1/gonum v0.16.0/go.mod h1:fef3am4MQ93R2HHpKnLk4/Tbh/s0+wqD5nfa6Pnwy4E= +google.golang.org/genproto/googleapis/api 
v0.0.0-20260209200024-4cfbd4190f57 h1:JLQynH/LBHfCTSbDWl+py8C+Rg/k1OVH3xfcaiANuF0= +google.golang.org/genproto/googleapis/api v0.0.0-20260209200024-4cfbd4190f57/go.mod h1:kSJwQxqmFXeo79zOmbrALdflXQeAYcUbgS7PbpMknCY= +google.golang.org/genproto/googleapis/rpc v0.0.0-20260217215200-42d3e9bedb6d h1:t/LOSXPJ9R0B6fnZNyALBRfZBH0Uy0gT+uR+SJ6syqQ= +google.golang.org/genproto/googleapis/rpc v0.0.0-20260217215200-42d3e9bedb6d/go.mod h1:4Hqkh8ycfw05ld/3BWL7rJOSfebL2Q+DVDeRgYgxUU8= +google.golang.org/grpc v1.79.1 h1:zGhSi45ODB9/p3VAawt9a+O/MULLl9dpizzNNpq7flY= +google.golang.org/grpc v1.79.1/go.mod h1:KmT0Kjez+0dde/v2j9vzwoAScgEPx/Bw1CYChhHLrHQ= +google.golang.org/protobuf v1.36.11 h1:fV6ZwhNocDyBLK0dj+fg8ektcVegBBuEolpbTQyBNVE= +google.golang.org/protobuf v1.36.11/go.mod h1:HTf+CrKn2C3g5S8VImy6tdcUvCska2kB7j23XfzDpco= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/sdks/guides/go/human-in-the-loop/main.go b/sdks/guides/go/human-in-the-loop/main.go new file mode 100644 index 0000000000..35e513d47c --- /dev/null +++ b/sdks/guides/go/human-in-the-loop/main.go @@ -0,0 +1,80 @@ +package main + +import ( + "fmt" + "log" + + "github.com/hatchet-dev/hatchet/pkg/cmdutils" + hatchet "github.com/hatchet-dev/hatchet/sdks/go" +) + +type ApprovalInput struct { + Action string `json:"action"` + To string `json:"to"` +} + +type ApprovalOutput struct { + Status string `json:"status"` + Action interface{} `json:"action,omitempty"` + Reason string 
`json:"reason,omitempty"` +} + +func main() { + client, err := hatchet.NewClient() + if err != nil { + log.Fatalf("failed to create hatchet client: %v", err) + } + + // > Step 02 Wait For Event + waitForApproval := func(ctx hatchet.DurableContext) (map[string]interface{}, error) { + runID := ctx.WorkflowRunId() + expression := fmt.Sprintf("input.runId == '%s'", runID) + event, err := ctx.WaitForEvent("approval:decision", expression) + if err != nil { + return nil, err + } + var eventData map[string]interface{} + if err := hatchet.EventInto(event, &eventData); err != nil { + return nil, err + } + return eventData, nil + } + // !! + + // > Step 01 Define Approval Task + task := client.NewStandaloneDurableTask("approval-task", func(ctx hatchet.DurableContext, input ApprovalInput) (ApprovalOutput, error) { + proposedAction := map[string]string{"action": "send_email", "to": "user@example.com"} + approval, err := waitForApproval(ctx) + if err != nil { + return ApprovalOutput{}, err + } + approved, _ := approval["approved"].(bool) + if approved { + return ApprovalOutput{Status: "approved", Action: proposedAction}, nil + } + reason, _ := approval["reason"].(string) + return ApprovalOutput{Status: "rejected", Reason: reason}, nil + }) + // !! + + // > Step 04 Run Worker + worker, err := client.NewWorker("human-in-the-loop-worker", + hatchet.WithWorkflows(task), + hatchet.WithDurableSlots(5), + ) + if err != nil { + log.Fatalf("failed to create worker: %v", err) + } + + interruptCtx, cancel := cmdutils.NewInterruptContext() + defer cancel() + + go func() { + if err := worker.StartBlocking(interruptCtx); err != nil { + log.Fatalf("failed to start worker: %v", err) + } + }() + // !! 
+ + <-interruptCtx.Done() +} diff --git a/sdks/guides/go/human-in-the-loop/trigger.go b/sdks/guides/go/human-in-the-loop/trigger.go new file mode 100644 index 0000000000..81e86b8435 --- /dev/null +++ b/sdks/guides/go/human-in-the-loop/trigger.go @@ -0,0 +1,19 @@ +package main + +import ( + "context" + + hatchet "github.com/hatchet-dev/hatchet/sdks/go" +) + +// > Step 03 Push Approval Event +// Include the runID so the event matches the specific task waiting for it. +func pushApproval(client *hatchet.Client, runID string, approved bool, reason string) { + _ = client.Events().Push(context.Background(), "approval:decision", map[string]interface{}{ + "runId": runID, + "approved": approved, + "reason": reason, + }) +} + +// !! diff --git a/sdks/guides/go/integrations/embedding_openai.go b/sdks/guides/go/integrations/embedding_openai.go new file mode 100644 index 0000000000..c1ebc3f756 --- /dev/null +++ b/sdks/guides/go/integrations/embedding_openai.go @@ -0,0 +1,26 @@ +// Third-party integration - requires: go get github.com/sashabaranov/go-openai +// See: /guides/rag-and-indexing + +package integrations + +import ( + "context" + "os" + + "github.com/sashabaranov/go-openai" +) + +// > OpenAI embedding usage +func Embed(ctx context.Context, text string) ([]float32, error) { + client := openai.NewClient(os.Getenv("OPENAI_API_KEY")) + resp, err := client.CreateEmbeddings(ctx, openai.EmbeddingRequest{ + Model: openai.AdaEmbeddingV2, + Input: text, + }) + if err != nil { + return nil, err + } + return resp.Data[0].Embedding, nil +} + +// !! 
diff --git a/sdks/guides/go/integrations/llm_openai.go b/sdks/guides/go/integrations/llm_openai.go new file mode 100644 index 0000000000..68c9751988 --- /dev/null +++ b/sdks/guides/go/integrations/llm_openai.go @@ -0,0 +1,38 @@ +// Third-party integration - requires: go get github.com/sashabaranov/go-openai +// See: /guides/ai-agents + +package integrations + +import ( + "context" + "encoding/json" + "os" + + "github.com/sashabaranov/go-openai" +) + +// > OpenAI usage +func Complete(ctx context.Context, messages []openai.ChatCompletionMessage) (map[string]interface{}, error) { + client := openai.NewClient(os.Getenv("OPENAI_API_KEY")) + resp, err := client.CreateChatCompletion(ctx, openai.ChatCompletionRequest{ + Model: openai.GPT4oMini, + Messages: messages, + }) + if err != nil { + return nil, err + } + msg := resp.Choices[0].Message + toolCalls := make([]map[string]interface{}, 0) + for _, tc := range msg.ToolCalls { + var args map[string]interface{} + _ = json.Unmarshal([]byte(tc.Function.Arguments), &args) + toolCalls = append(toolCalls, map[string]interface{}{"name": tc.Function.Name, "args": args}) + } + return map[string]interface{}{ + "content": msg.Content, + "tool_calls": toolCalls, + "done": len(toolCalls) == 0, + }, nil +} + +// !! diff --git a/sdks/guides/go/integrations/ocr_tesseract.go b/sdks/guides/go/integrations/ocr_tesseract.go new file mode 100644 index 0000000000..d4a722353d --- /dev/null +++ b/sdks/guides/go/integrations/ocr_tesseract.go @@ -0,0 +1,18 @@ +//go:build ignore + +// Third-party integration - requires: go get github.com/otiai10/gosseract/v2 +// and Tesseract C library. Build tag excludes from default build (no native deps in CI). +// See: /guides/document-processing + +package integrations + +import "github.com/otiai10/gosseract/v2" + +// > Tesseract usage +func ParseDocument(content []byte) (string, error) { + client := gosseract.NewClient() + defer client.Close() + return client.SetImageFromBytes(content).GetText() +} + +// !! 
diff --git a/sdks/guides/go/llm-pipelines/main.go b/sdks/guides/go/llm-pipelines/main.go new file mode 100644 index 0000000000..2a2444831d --- /dev/null +++ b/sdks/guides/go/llm-pipelines/main.go @@ -0,0 +1,76 @@ +package main + +import ( + "log" + + "github.com/hatchet-dev/hatchet/pkg/cmdutils" + hatchet "github.com/hatchet-dev/hatchet/sdks/go" +) + +type PipelineInput struct { + Prompt string `json:"prompt"` +} + +// generate is a mock - no external LLM API. +func generate(prompt string) map[string]interface{} { + n := 50 + if len(prompt) < n { + n = len(prompt) + } + return map[string]interface{}{"content": "Generated for: " + prompt[:n] + "...", "valid": true} +} + +func main() { + client, err := hatchet.NewClient() + if err != nil { + log.Fatalf("failed to create hatchet client: %v", err) + } + + // > Step 01 Define Pipeline + workflow := client.NewWorkflow("LLMPipeline") + // !! + + // > Step 02 Prompt Task + buildPrompt := func(userInput, context string) string { + if context != "" { + return "Process the following: " + userInput + "\nContext: " + context + } + return "Process the following: " + userInput + } + _ = buildPrompt + // !! + + promptTask := workflow.NewTask("prompt-task", func(ctx hatchet.Context, input PipelineInput) (map[string]interface{}, error) { + return map[string]interface{}{"prompt": input.Prompt}, nil + }) + + // > Step 03 Validate Task + generateTask := workflow.NewTask("generate-task", func(ctx hatchet.Context, input PipelineInput) (map[string]interface{}, error) { + var prev map[string]interface{} + if err := ctx.ParentOutput(promptTask, &prev); err != nil { + return nil, err + } + output := generate(prev["prompt"].(string)) + if !output["valid"].(bool) { + panic("validation failed") + } + return output, nil + }, hatchet.WithParents(promptTask)) + // !! 
+ + _ = generateTask + + // > Step 04 Run Worker + worker, err := client.NewWorker("llm-pipeline-worker", hatchet.WithWorkflows(workflow)) + if err != nil { + log.Fatalf("failed to create worker: %v", err) + } + + interruptCtx, cancel := cmdutils.NewInterruptContext() + defer cancel() + + if err := worker.StartBlocking(interruptCtx); err != nil { + log.Fatalf("failed to start worker: %v", err) + } + // !! +} diff --git a/sdks/guides/go/multi-agent/main.go b/sdks/guides/go/multi-agent/main.go new file mode 100644 index 0000000000..9281ba105b --- /dev/null +++ b/sdks/guides/go/multi-agent/main.go @@ -0,0 +1,103 @@ +package main + +import ( + "fmt" + "log" + "strings" + + "github.com/hatchet-dev/hatchet/pkg/cmdutils" + hatchet "github.com/hatchet-dev/hatchet/sdks/go" +) + +type SpecialistInput struct { + Task string `json:"task"` + Context string `json:"context,omitempty"` +} + +func main() { + client, err := hatchet.NewClient() + if err != nil { + log.Fatalf("failed to create hatchet client: %v", err) + } + + // > Step 01 Specialist Agents + researchTask := client.NewStandaloneDurableTask("research-specialist", func(ctx hatchet.DurableContext, input SpecialistInput) (map[string]interface{}, error) { + return map[string]interface{}{"result": MockSpecialistLLM(input.Task, "research")}, nil + }) + + writingTask := client.NewStandaloneDurableTask("writing-specialist", func(ctx hatchet.DurableContext, input SpecialistInput) (map[string]interface{}, error) { + return map[string]interface{}{"result": MockSpecialistLLM(input.Task, "writing")}, nil + }) + + codeTask := client.NewStandaloneDurableTask("code-specialist", func(ctx hatchet.DurableContext, input SpecialistInput) (map[string]interface{}, error) { + return map[string]interface{}{"result": MockSpecialistLLM(input.Task, "code")}, nil + }) + // !! 
+ + specialists := map[string]*hatchet.StandaloneTask{ + "research": researchTask, + "writing": writingTask, + "code": codeTask, + } + + // > Step 02 Orchestrator Loop + orchestrator := client.NewStandaloneDurableTask("multi-agent-orchestrator", func(ctx hatchet.DurableContext, input map[string]interface{}) (map[string]interface{}, error) { + messages := []map[string]interface{}{{"role": "user", "content": input["goal"].(string)}} + + for i := 0; i < 10; i++ { + response := MockOrchestratorLLM(messages) + + if response.Done { + return map[string]interface{}{"result": response.Content}, nil + } + + specialist, ok := specialists[response.ToolCall.Name] + if !ok { + return nil, fmt.Errorf("unknown specialist: %s", response.ToolCall.Name) + } + + var contextParts []string + for _, m := range messages { + contextParts = append(contextParts, m["content"].(string)) + } + + taskResult, err := specialist.Run(ctx, SpecialistInput{ + Task: response.ToolCall.Args["task"], + Context: strings.Join(contextParts, "\n"), + }) + if err != nil { + return nil, err + } + var result map[string]interface{} + if err := taskResult.Into(&result); err != nil { + return nil, err + } + + messages = append(messages, + map[string]interface{}{"role": "assistant", "content": fmt.Sprintf("Called %s", response.ToolCall.Name)}, + map[string]interface{}{"role": "tool", "content": result["result"].(string)}, + ) + } + + return map[string]interface{}{"result": "Max iterations reached"}, nil + }) + // !! + + // > Step 03 Run Worker + worker, err := client.NewWorker("multi-agent-worker", + hatchet.WithWorkflows(researchTask, writingTask, codeTask, orchestrator), + hatchet.WithSlots(10), + hatchet.WithDurableSlots(5), + ) + if err != nil { + log.Fatalf("failed to create worker: %v", err) + } + + interruptCtx, cancel := cmdutils.NewInterruptContext() + defer cancel() + + if err := worker.StartBlocking(interruptCtx); err != nil { + log.Fatalf("failed to start worker: %v", err) + } + // !! 
+} diff --git a/sdks/guides/go/multi-agent/mock_llm.go b/sdks/guides/go/multi-agent/mock_llm.go new file mode 100644 index 0000000000..66ae295af6 --- /dev/null +++ b/sdks/guides/go/multi-agent/mock_llm.go @@ -0,0 +1,36 @@ +package main + +import "fmt" + +var orchestratorCallCount int + +type OrchestratorResponse struct { + Done bool + Content string + ToolCall *struct { + Name string + Args map[string]string + } +} + +func MockOrchestratorLLM(messages []map[string]interface{}) OrchestratorResponse { + orchestratorCallCount++ + switch orchestratorCallCount { + case 1: + return OrchestratorResponse{Done: false, ToolCall: &struct { + Name string + Args map[string]string + }{Name: "research", Args: map[string]string{"task": "Find key facts about the topic"}}} + case 2: + return OrchestratorResponse{Done: false, ToolCall: &struct { + Name string + Args map[string]string + }{Name: "writing", Args: map[string]string{"task": "Write a summary from the research"}}} + default: + return OrchestratorResponse{Done: true, Content: "Here is the final report combining research and writing."} + } +} + +func MockSpecialistLLM(task, role string) string { + return fmt.Sprintf("[%s] Completed: %s", role, task) +} diff --git a/sdks/guides/go/parallelization/main.go b/sdks/guides/go/parallelization/main.go new file mode 100644 index 0000000000..24c6d83069 --- /dev/null +++ b/sdks/guides/go/parallelization/main.go @@ -0,0 +1,144 @@ +package main + +import ( + "log" + "sync" + + "github.com/hatchet-dev/hatchet/pkg/cmdutils" + hatchet "github.com/hatchet-dev/hatchet/sdks/go" +) + +type MessageInput struct { + Message string `json:"message"` +} + +type ContentInput struct { + Content string `json:"content"` +} + +func main() { + client, err := hatchet.NewClient() + if err != nil { + log.Fatalf("failed to create hatchet client: %v", err) + } + + // > Step 01 Parallel Tasks + contentTask := client.NewStandaloneTask("generate-content", func(ctx hatchet.Context, input MessageInput) 
(map[string]interface{}, error) { + return map[string]interface{}{"content": MockGenerateContent(input.Message)}, nil + }) + + safetyTask := client.NewStandaloneTask("safety-check", func(ctx hatchet.Context, input MessageInput) (map[string]interface{}, error) { + result := MockSafetyCheck(input.Message) + return map[string]interface{}{"safe": result.Safe, "reason": result.Reason}, nil + }) + + evaluateTask := client.NewStandaloneTask("evaluate-content", func(ctx hatchet.Context, input ContentInput) (map[string]interface{}, error) { + result := MockEvaluateContent(input.Content) + return map[string]interface{}{"score": result.Score, "approved": result.Approved}, nil + }) + // !! + + // > Step 02 Sectioning + sectioningTask := client.NewStandaloneDurableTask("parallel-sectioning", func(ctx hatchet.DurableContext, input map[string]interface{}) (map[string]interface{}, error) { + msg := input["message"].(string) + + var contentTr, safetyTr *hatchet.TaskResult + var contentErr, safetyErr error + var wg sync.WaitGroup + wg.Add(2) + + go func() { + defer wg.Done() + contentTr, contentErr = contentTask.Run(ctx, MessageInput{Message: msg}) + }() + go func() { + defer wg.Done() + safetyTr, safetyErr = safetyTask.Run(ctx, MessageInput{Message: msg}) + }() + wg.Wait() + + if contentErr != nil { + return nil, contentErr + } + if safetyErr != nil { + return nil, safetyErr + } + var contentResult, safetyResult map[string]interface{} + if err := contentTr.Into(&contentResult); err != nil { + return nil, err + } + if err := safetyTr.Into(&safetyResult); err != nil { + return nil, err + } + + if safe, ok := safetyResult["safe"].(bool); !ok || !safe { + return map[string]interface{}{"blocked": true, "reason": safetyResult["reason"]}, nil + } + return map[string]interface{}{"blocked": false, "content": contentResult["content"]}, nil + }) + // !! 
+ + // > Step 03 Voting + votingTask := client.NewStandaloneDurableTask("parallel-voting", func(ctx hatchet.DurableContext, input map[string]interface{}) (map[string]interface{}, error) { + content := input["content"].(string) + numVoters := 3 + taskResults := make([]*hatchet.TaskResult, numVoters) + errs := make([]error, numVoters) + + var wg sync.WaitGroup + for i := 0; i < numVoters; i++ { + wg.Add(1) + go func(idx int) { + defer wg.Done() + taskResults[idx], errs[idx] = evaluateTask.Run(ctx, ContentInput{Content: content}) + }(i) + } + wg.Wait() + + results := make([]map[string]interface{}, numVoters) + for i := 0; i < numVoters; i++ { + if errs[i] != nil { + return nil, errs[i] + } + if err := taskResults[i].Into(&results[i]); err != nil { + return nil, err + } + } + + approvals := 0 + totalScore := 0.0 + for _, r := range results { + if approved, ok := r["approved"].(bool); ok && approved { + approvals++ + } + if score, ok := r["score"].(float64); ok { + totalScore += score + } + } + + return map[string]interface{}{ + "approved": approvals >= 2, + "averageScore": totalScore / float64(numVoters), + "votes": numVoters, + }, nil + }) + // !! + + // > Step 04 Run Worker + worker, err := client.NewWorker("parallelization-worker", + hatchet.WithWorkflows(contentTask, safetyTask, evaluateTask, sectioningTask, votingTask), + hatchet.WithSlots(10), + hatchet.WithDurableSlots(5), + ) + if err != nil { + log.Fatalf("failed to create worker: %v", err) + } + + interruptCtx, cancel := cmdutils.NewInterruptContext() + defer cancel() + + if err := worker.StartBlocking(interruptCtx); err != nil { + log.Fatalf("failed to start worker: %v", err) + } + // !! 
+} diff --git a/sdks/guides/go/parallelization/mock_llm.go b/sdks/guides/go/parallelization/mock_llm.go new file mode 100644 index 0000000000..a8f75a2dc4 --- /dev/null +++ b/sdks/guides/go/parallelization/mock_llm.go @@ -0,0 +1,32 @@ +package main + +import "strings" + +func MockGenerateContent(message string) string { + return "Here is a helpful response to: " + message +} + +type SafetyResult struct { + Safe bool + Reason string +} + +func MockSafetyCheck(message string) SafetyResult { + if strings.Contains(strings.ToLower(message), "unsafe") { + return SafetyResult{Safe: false, Reason: "Content flagged as potentially unsafe."} + } + return SafetyResult{Safe: true, Reason: "Content is appropriate."} +} + +type EvalResult struct { + Score float64 + Approved bool +} + +func MockEvaluateContent(content string) EvalResult { + score := 0.3 + if len(content) > 20 { + score = 0.85 + } + return EvalResult{Score: score, Approved: score >= 0.7} +} diff --git a/sdks/guides/go/rag-and-indexing/main.go b/sdks/guides/go/rag-and-indexing/main.go new file mode 100644 index 0000000000..6489353e0e --- /dev/null +++ b/sdks/guides/go/rag-and-indexing/main.go @@ -0,0 +1,136 @@ +package main + +import ( + "log" + + "github.com/hatchet-dev/hatchet/pkg/cmdutils" + hatchet "github.com/hatchet-dev/hatchet/sdks/go" +) + +type DocInput struct { + DocID string `json:"doc_id"` + Content string `json:"content"` +} + +type ChunkInput struct { + Chunk string `json:"chunk"` +} + +type QueryInput struct { + Query string `json:"query"` + TopK int `json:"top_k"` +} + +func embed(text string) []float64 { + vec := make([]float64, 64) + for i := range vec { + vec[i] = 0.1 + } + return vec +} + +func main() { + client, err := hatchet.NewClient() + if err != nil { + log.Fatalf("failed to create hatchet client: %v", err) + } + + // > Step 01 Define Workflow + workflow := client.NewWorkflow("RAGPipeline") + // !! 
+ + // > Step 02 Define Ingest Task + ingest := workflow.NewTask("ingest", func(ctx hatchet.Context, input DocInput) (map[string]interface{}, error) { + return map[string]interface{}{"doc_id": input.DocID, "content": input.Content}, nil + }) + // !! + + // > Step 03 Chunk Task + chunkContent := func(content string, chunkSize int) []string { + var chunks []string + for i := 0; i < len(content); i += chunkSize { + end := i + chunkSize + if end > len(content) { + end = len(content) + } + chunks = append(chunks, content[i:end]) + } + return chunks + } + _ = chunkContent + // !! + + // > Step 04 Embed Task + embedChunkTask := client.NewStandaloneTask("embed-chunk", func(ctx hatchet.Context, input ChunkInput) (map[string]interface{}, error) { + return map[string]interface{}{"vector": embed(input.Chunk)}, nil + }) + + chunkAndEmbed := workflow.NewDurableTask("chunk-and-embed", func(ctx hatchet.DurableContext, input DocInput) (map[string]interface{}, error) { + var ingested map[string]interface{} + if err := ctx.ParentOutput(ingest, &ingested); err != nil { + return nil, err + } + content := ingested["content"].(string) + var chunks []string + for i := 0; i < len(content); i += 100 { + end := i + 100 + if end > len(content) { + end = len(content) + } + chunks = append(chunks, content[i:end]) + } + inputs := make([]hatchet.RunManyOpt, len(chunks)) + for i, c := range chunks { + inputs[i] = hatchet.RunManyOpt{Input: ChunkInput{Chunk: c}} + } + runRefs, err := embedChunkTask.RunMany(ctx, inputs) + if err != nil { + return nil, err + } + vectors := make([]interface{}, len(runRefs)) + for i, ref := range runRefs { + result, err := ref.Result() + if err != nil { + return nil, err + } + var parsed map[string]interface{} + if err := result.TaskOutput("embed-chunk").Into(&parsed); err != nil { + return nil, err + } + vectors[i] = parsed["vector"] + } + return map[string]interface{}{"doc_id": ingested["doc_id"], "vectors": vectors}, nil + }, hatchet.WithParents(ingest)) + // !! 
+ + _ = chunkAndEmbed + + // > Step 05 Query Task + queryTask := client.NewStandaloneDurableTask("rag-query", func(ctx hatchet.DurableContext, input QueryInput) (map[string]interface{}, error) { + res, err := embedChunkTask.Run(ctx, ChunkInput{Chunk: input.Query}) + if err != nil { + return nil, err + } + var parsed map[string]interface{} + if err := res.Into(&parsed); err != nil { + return nil, err + } + // Replace with a real vector DB lookup in production + return map[string]interface{}{"query": input.Query, "vector": parsed["vector"], "results": []interface{}{}}, nil + }) + // !! + + // > Step 06 Run Worker + worker, err := client.NewWorker("rag-worker", hatchet.WithWorkflows(workflow, embedChunkTask, queryTask)) + if err != nil { + log.Fatalf("failed to create worker: %v", err) + } + + interruptCtx, cancel := cmdutils.NewInterruptContext() + defer cancel() + + if err := worker.StartBlocking(interruptCtx); err != nil { + log.Fatalf("failed to start worker: %v", err) + } + // !! +} diff --git a/sdks/guides/go/routing/main.go b/sdks/guides/go/routing/main.go new file mode 100644 index 0000000000..2ff3748e4b --- /dev/null +++ b/sdks/guides/go/routing/main.go @@ -0,0 +1,91 @@ +package main + +import ( + "log" + + "github.com/hatchet-dev/hatchet/pkg/cmdutils" + hatchet "github.com/hatchet-dev/hatchet/sdks/go" +) + +type MessageInput struct { + Message string `json:"message"` +} + +func main() { + client, err := hatchet.NewClient() + if err != nil { + log.Fatalf("failed to create hatchet client: %v", err) + } + + // > Step 01 Classify Task + classifyTask := client.NewStandaloneDurableTask("classify-message", func(ctx hatchet.DurableContext, input MessageInput) (map[string]interface{}, error) { + return map[string]interface{}{"category": MockClassify(input.Message)}, nil + }) + // !! 
+ + // > Step 02 Specialist Tasks + supportTask := client.NewStandaloneDurableTask("handle-support", func(ctx hatchet.DurableContext, input MessageInput) (map[string]interface{}, error) { + return map[string]interface{}{"response": MockReply(input.Message, "support"), "category": "support"}, nil + }) + + salesTask := client.NewStandaloneDurableTask("handle-sales", func(ctx hatchet.DurableContext, input MessageInput) (map[string]interface{}, error) { + return map[string]interface{}{"response": MockReply(input.Message, "sales"), "category": "sales"}, nil + }) + + defaultTask := client.NewStandaloneDurableTask("handle-default", func(ctx hatchet.DurableContext, input MessageInput) (map[string]interface{}, error) { + return map[string]interface{}{"response": MockReply(input.Message, "other"), "category": "other"}, nil + }) + // !! + + // > Step 03 Router Task + routerTask := client.NewStandaloneDurableTask("message-router", func(ctx hatchet.DurableContext, input map[string]interface{}) (map[string]interface{}, error) { + msg := input["message"].(string) + classResult, err := classifyTask.Run(ctx, MessageInput{Message: msg}) + if err != nil { + return nil, err + } + var classData map[string]interface{} + if err := classResult.Into(&classData); err != nil { + return nil, err + } + + runAndUnmarshal := func(t *hatchet.StandaloneTask) (map[string]interface{}, error) { + tr, err := t.Run(ctx, MessageInput{Message: msg}) + if err != nil { + return nil, err + } + var out map[string]interface{} + if err := tr.Into(&out); err != nil { + return nil, err + } + return out, nil + } + switch classData["category"].(string) { + case "support": + return runAndUnmarshal(supportTask) + case "sales": + return runAndUnmarshal(salesTask) + default: + return runAndUnmarshal(defaultTask) + } + }) + // !! 
+ + // > Step 04 Run Worker + worker, err := client.NewWorker("routing-worker", + hatchet.WithWorkflows(classifyTask, supportTask, salesTask, defaultTask, routerTask), + hatchet.WithSlots(5), + hatchet.WithDurableSlots(5), + ) + if err != nil { + log.Fatalf("failed to create worker: %v", err) + } + + interruptCtx, cancel := cmdutils.NewInterruptContext() + defer cancel() + + if err := worker.StartBlocking(interruptCtx); err != nil { + log.Fatalf("failed to start worker: %v", err) + } + // !! +} diff --git a/sdks/guides/go/routing/mock_classifier.go b/sdks/guides/go/routing/mock_classifier.go new file mode 100644 index 0000000000..2cf86711af --- /dev/null +++ b/sdks/guides/go/routing/mock_classifier.go @@ -0,0 +1,29 @@ +package main + +import "strings" + +func MockClassify(message string) string { + lower := strings.ToLower(message) + for _, w := range []string{"bug", "error", "help"} { + if strings.Contains(lower, w) { + return "support" + } + } + for _, w := range []string{"price", "buy", "plan"} { + if strings.Contains(lower, w) { + return "sales" + } + } + return "other" +} + +func MockReply(message, role string) string { + switch role { + case "support": + return "[Support] I can help with that technical issue. Let me look into: " + message + case "sales": + return "[Sales] Great question about pricing! Here's what I can tell you about: " + message + default: + return "[General] Thanks for reaching out. 
Regarding: " + message + } +} diff --git a/sdks/guides/go/scheduled-jobs/main.go b/sdks/guides/go/scheduled-jobs/main.go new file mode 100644 index 0000000000..2e4c2dd0cc --- /dev/null +++ b/sdks/guides/go/scheduled-jobs/main.go @@ -0,0 +1,37 @@ +package main + +import ( + "log" + + "github.com/hatchet-dev/hatchet/pkg/cmdutils" + hatchet "github.com/hatchet-dev/hatchet/sdks/go" +) + +func main() { + client, err := hatchet.NewClient() + if err != nil { + log.Fatalf("failed to create hatchet client: %v", err) + } + + // > Step 01 Define Cron Task + task := client.NewStandaloneTask("run-scheduled-job", func(ctx hatchet.Context, input map[string]interface{}) (map[string]string, error) { + return map[string]string{"status": "completed", "job": "maintenance"}, nil + }, hatchet.WithWorkflowCron("0 * * * *")) + // !! + + // > Step 03 Run Worker + worker, err := client.NewWorker("scheduled-worker", hatchet.WithWorkflows(task)) + if err != nil { + log.Fatalf("failed to create worker: %v", err) + } + + interruptCtx, cancel := cmdutils.NewInterruptContext() + defer cancel() + + go scheduleOneTime(client) + + if err := worker.StartBlocking(interruptCtx); err != nil { + log.Fatalf("failed to start worker: %v", err) + } + // !! 
+} diff --git a/sdks/guides/go/scheduled-jobs/scheduled-jobs b/sdks/guides/go/scheduled-jobs/scheduled-jobs new file mode 100755 index 0000000000..7342fc74e6 Binary files /dev/null and b/sdks/guides/go/scheduled-jobs/scheduled-jobs differ diff --git a/sdks/guides/go/scheduled-jobs/trigger.go b/sdks/guides/go/scheduled-jobs/trigger.go new file mode 100644 index 0000000000..f26cd76399 --- /dev/null +++ b/sdks/guides/go/scheduled-jobs/trigger.go @@ -0,0 +1,24 @@ +package main + +import ( + "context" + "log" + "time" + + hatchet "github.com/hatchet-dev/hatchet/sdks/go" + "github.com/hatchet-dev/hatchet/sdks/go/features" +) + +// > Step 02 Schedule One Time +func scheduleOneTime(client *hatchet.Client) { + runAt := time.Now().Add(1 * time.Hour) + _, err := client.Schedules().Create(context.Background(), "run-scheduled-job", features.CreateScheduledRunTrigger{ + TriggerAt: runAt, + Input: map[string]interface{}{}, + }) + if err != nil { + log.Printf("failed to schedule: %v", err) + } +} + +// !! diff --git a/sdks/guides/go/streaming/client.go b/sdks/guides/go/streaming/client.go new file mode 100644 index 0000000000..d5fa79db97 --- /dev/null +++ b/sdks/guides/go/streaming/client.go @@ -0,0 +1,20 @@ +package main + +import ( + "context" + "fmt" + + hatchet "github.com/hatchet-dev/hatchet/sdks/go" +) + +// > Step 03 Subscribe Client +// Client triggers the task and subscribes to the stream. +func runAndSubscribe(client *hatchet.Client) { + runRef, _ := client.RunNoWait(context.Background(), "stream-example", map[string]interface{}{}) + stream := client.Runs().SubscribeToStream(context.Background(), runRef.RunId) + for chunk := range stream { + fmt.Print(chunk) + } +} + +// !! 
diff --git a/sdks/guides/go/streaming/main.go b/sdks/guides/go/streaming/main.go new file mode 100644 index 0000000000..096b553fdc --- /dev/null +++ b/sdks/guides/go/streaming/main.go @@ -0,0 +1,50 @@ +package main + +import ( + "log" + "time" + + "github.com/hatchet-dev/hatchet/pkg/cmdutils" + hatchet "github.com/hatchet-dev/hatchet/sdks/go" +) + +func main() { + client, err := hatchet.NewClient() + if err != nil { + log.Fatalf("failed to create hatchet client: %v", err) + } + + // > Step 01 Define Streaming Task + task := client.NewStandaloneTask("stream-example", func(ctx hatchet.Context, input map[string]interface{}) (map[string]string, error) { + for i := 0; i < 5; i++ { + ctx.PutStream("chunk-" + string(rune('0'+i))) + time.Sleep(500 * time.Millisecond) + } + return map[string]string{"status": "done"}, nil + }) + // !! + + // > Step 02 Emit Chunks + emitChunks := func(ctx hatchet.Context) { + for i := 0; i < 5; i++ { + ctx.PutStream("chunk-" + string(rune('0'+i))) + time.Sleep(500 * time.Millisecond) + } + } + _ = emitChunks + // !! + + // > Step 04 Run Worker + worker, err := client.NewWorker("streaming-worker", hatchet.WithWorkflows(task)) + if err != nil { + log.Fatalf("failed to create worker: %v", err) + } + + interruptCtx, cancel := cmdutils.NewInterruptContext() + defer cancel() + + if err := worker.StartBlocking(interruptCtx); err != nil { + log.Fatalf("failed to start worker: %v", err) + } + // !! 
+} diff --git a/sdks/guides/go/web-scraping/main.go b/sdks/guides/go/web-scraping/main.go new file mode 100644 index 0000000000..53944b039e --- /dev/null +++ b/sdks/guides/go/web-scraping/main.go @@ -0,0 +1,129 @@ +package main + +import ( + "log" + "regexp" + "strings" + + "github.com/hatchet-dev/hatchet/pkg/client/types" + "github.com/hatchet-dev/hatchet/pkg/cmdutils" + hatchet "github.com/hatchet-dev/hatchet/sdks/go" + "github.com/hatchet-dev/hatchet/sdks/go/features" +) + +type ScrapeInput struct { + URL string `json:"url"` +} + +type ProcessInput struct { + URL string `json:"url"` + Content string `json:"content"` +} + +const scrapeRateLimitKey = "scrape-rate-limit" + +func main() { + client, err := hatchet.NewClient() + if err != nil { + log.Fatalf("failed to create hatchet client: %v", err) + } + + // > Step 01 Define Scrape Task + scrapeTask := client.NewStandaloneTask("scrape-url", func(ctx hatchet.Context, input ScrapeInput) (map[string]interface{}, error) { + result := MockScrape(input.URL) + return map[string]interface{}{ + "url": result.URL, "title": result.Title, + "content": result.Content, "scraped_at": result.ScrapedAt, + }, nil + }, hatchet.WithRetries(2)) + // !! + + // > Step 02 Process Content + linkRe := regexp.MustCompile(`https?://[^\s<>"']+`) + processTask := client.NewStandaloneTask("process-content", func(ctx hatchet.Context, input ProcessInput) (map[string]interface{}, error) { + links := linkRe.FindAllString(input.Content, -1) + summary := input.Content + if len(summary) > 200 { + summary = summary[:200] + } + wordCount := len(strings.Fields(input.Content)) + return map[string]interface{}{ + "summary": strings.TrimSpace(summary), "word_count": wordCount, "links": links, + }, nil + }) + // !! 
+
+	// > Step 03 Cron Workflow
+	cronWf := client.NewWorkflow("WebScrapeWorkflow", hatchet.WithWorkflowCron("0 */6 * * *"))
+
+	cronWf.NewTask("scheduled-scrape", func(ctx hatchet.Context, input map[string]interface{}) (map[string]interface{}, error) {
+		urls := []string{
+			"https://example.com/pricing",
+			"https://example.com/blog",
+			"https://example.com/docs",
+		}
+
+		results := []map[string]interface{}{} // process-content emits mixed value types (int word_count, []string links)
+		for _, url := range urls {
+			scrapedResult, err := scrapeTask.Run(ctx, ScrapeInput{URL: url})
+			if err != nil {
+				return nil, err
+			}
+			var scraped map[string]interface{}
+			if err := scrapedResult.Into(&scraped); err != nil {
+				return nil, err
+			}
+			processedResult, err := processTask.Run(ctx, ProcessInput{URL: url, Content: scraped["content"].(string)})
+			if err != nil {
+				return nil, err
+			}
+			var processed map[string]interface{} // was map[string]string: decoding non-string values into it fails
+			if err := processedResult.Into(&processed); err != nil {
+				return nil, err
+			}
+			results = append(results, processed)
+		}
+		return map[string]interface{}{"refreshed": len(results), "results": results}, nil
+	})
+	// !!
+
+	// > Step 04 Rate Limited Scrape
+	units := 1
+	rateLimitedScrapeTask := client.NewStandaloneTask("rate-limited-scrape", func(ctx hatchet.Context, input ScrapeInput) (map[string]interface{}, error) {
+		result := MockScrape(input.URL)
+		return map[string]interface{}{
+			"url": result.URL, "title": result.Title,
+			"content": result.Content, "scraped_at": result.ScrapedAt,
+		}, nil
+	}, hatchet.WithRetries(2), hatchet.WithRateLimits(&types.RateLimit{
+		Key:   scrapeRateLimitKey,
+		Units: &units,
+	}))
+	// !!
+
+	// > Step 05 Run Worker
+	err = client.RateLimits().Upsert(features.CreateRatelimitOpts{
+		Key:      scrapeRateLimitKey,
+		Limit:    10,
+		Duration: types.Minute,
+	})
+	if err != nil {
+		log.Fatalf("failed to upsert rate limit: %v", err)
+	}
+
+	worker, err := client.NewWorker("web-scraping-worker",
+		hatchet.WithWorkflows(scrapeTask, processTask, cronWf, rateLimitedScrapeTask),
+		hatchet.WithSlots(5),
+	)
+	if err != nil {
+		log.Fatalf("failed to create worker: %v", err)
+	}
+
+	interruptCtx, cancel := cmdutils.NewInterruptContext()
+	defer cancel()
+
+	if err := worker.StartBlocking(interruptCtx); err != nil {
+		log.Fatalf("failed to start worker: %v", err)
+	}
+	// !!
+}
diff --git a/sdks/guides/go/web-scraping/mock_scraper.go b/sdks/guides/go/web-scraping/mock_scraper.go
new file mode 100644
index 0000000000..dcc72de7a3
--- /dev/null
+++ b/sdks/guides/go/web-scraping/mock_scraper.go
@@ -0,0 +1,39 @@
+package main
+
+import (
+	"strconv"
+	"time"
+)
+
+type ScrapeResult struct {
+	URL       string `json:"url"`
+	Title     string `json:"title"`
+	Content   string `json:"content"`
+	ScrapedAt string `json:"scraped_at"`
+}
+
+func MockScrape(url string) ScrapeResult {
+	return ScrapeResult{
+		URL:       url,
+		Title:     "Page: " + url,
+		Content:   "Mock scraped content from " + url + ". In production, use Firecrawl, Browserbase, or Playwright here.",
+		ScrapedAt: time.Now().UTC().Format(time.RFC3339),
+	}
+}
+
+func MockExtract(content string) map[string]string {
+	summary := content
+	if len(summary) > 80 {
+		summary = summary[:80]
+	}
+	words := 0
+	for _, c := range content {
+		if c == ' ' {
+			words++
+		}
+	}
+	return map[string]string{
+		"summary":    summary,
+		"word_count": strconv.Itoa(words + 1), // was string(rune(words+1)): that yields one character (often unprintable), not digits
+	}
+}
diff --git a/sdks/guides/go/webhook-processing/main.go b/sdks/guides/go/webhook-processing/main.go
new file mode 100644
index 0000000000..b63a799ad6
--- /dev/null
+++ b/sdks/guides/go/webhook-processing/main.go
@@ -0,0 +1,61 @@
+package main
+
+import (
+	"context"
+	"log"
+
+	"github.com/hatchet-dev/hatchet/pkg/cmdutils"
+	hatchet "github.com/hatchet-dev/hatchet/sdks/go"
+)
+
+type WebhookPayload struct {
+	EventID string                 `json:"event_id"`
+	Type    string                 `json:"type"`
+	Data    map[string]interface{} `json:"data"`
+}
+
+func main() {
+	client, err := hatchet.NewClient()
+	if err != nil {
+		log.Fatalf("failed to create hatchet client: %v", err)
+	}
+
+	// > Step 01 Define Webhook Task
+	task := client.NewStandaloneTask("process-webhook", func(ctx hatchet.Context, input WebhookPayload) (map[string]string, error) {
+		return map[string]string{"processed": input.EventID, "type": input.Type}, nil
+	}, hatchet.WithWorkflowEvents("webhook:stripe", "webhook:github"))
+	// !!
+
+	// > Step 02 Register Webhook
+	// Call from your webhook endpoint to trigger the task.
+	forwardWebhook := func(eventKey string, payload map[string]interface{}) {
+		_ = client.Events().Push(context.Background(), eventKey, payload)
+	}
+	_ = forwardWebhook
+	// !!
+
+	// > Step 03 Process Payload
+	// Validate event_id for deduplication; process idempotently.
+ validateAndProcess := func(input WebhookPayload) (map[string]string, error) { + if input.EventID == "" { + return nil, nil // or return error + } + return map[string]string{"processed": input.EventID, "type": input.Type}, nil + } + _ = validateAndProcess + // !! + + // > Step 04 Run Worker + worker, err := client.NewWorker("webhook-worker", hatchet.WithWorkflows(task)) + if err != nil { + log.Fatalf("failed to create worker: %v", err) + } + + interruptCtx, cancel := cmdutils.NewInterruptContext() + defer cancel() + + if err := worker.StartBlocking(interruptCtx); err != nil { + log.Fatalf("failed to start worker: %v", err) + } + // !! +} diff --git a/sdks/guides/hatchet-client.ts b/sdks/guides/hatchet-client.ts new file mode 100644 index 0000000000..0141746e4e --- /dev/null +++ b/sdks/guides/hatchet-client.ts @@ -0,0 +1,4 @@ +// Shared hatchet client for TypeScript guides +import Hatchet from '@hatchet-dev/typescript-sdk'; + +export const hatchet = Hatchet.init(); diff --git a/sdks/guides/python/ai_agents/__pycache__/mock_agent.cpython-310.pyc b/sdks/guides/python/ai_agents/__pycache__/mock_agent.cpython-310.pyc new file mode 100644 index 0000000000..582a5b54d8 Binary files /dev/null and b/sdks/guides/python/ai_agents/__pycache__/mock_agent.cpython-310.pyc differ diff --git a/sdks/guides/python/ai_agents/__pycache__/worker.cpython-310.pyc b/sdks/guides/python/ai_agents/__pycache__/worker.cpython-310.pyc new file mode 100644 index 0000000000..183b06287c Binary files /dev/null and b/sdks/guides/python/ai_agents/__pycache__/worker.cpython-310.pyc differ diff --git a/sdks/guides/python/ai_agents/llm_service.py b/sdks/guides/python/ai_agents/llm_service.py new file mode 100644 index 0000000000..27930d3008 --- /dev/null +++ b/sdks/guides/python/ai_agents/llm_service.py @@ -0,0 +1,49 @@ +"""Encapsulated LLM service - swap MockLLMService for OpenAI/Anthropic in production. 
+ +See docs: /guides/ai-agents +""" + +from abc import ABC, abstractmethod + + +class LLMService(ABC): + """Interface for LLM completion. Implement with OpenAI, Anthropic, etc.""" + + @abstractmethod + def complete(self, messages: list[dict]) -> dict: + """Complete a chat. Returns {content, tool_calls, done}.""" + pass + + +class MockLLMService(LLMService): + """No external API - for local development and tests.""" + + def __init__(self) -> None: + self._call_count: dict[str, int] = {} + + def complete(self, messages: list[dict]) -> dict: + key = "default" + self._call_count[key] = self._call_count.get(key, 0) + 1 + if self._call_count[key] == 1: + return { + "content": "", + "tool_calls": [{"name": "get_weather", "args": {"location": "SF"}}], + "done": False, + } + return {"content": "It's 72°F and sunny in SF.", "tool_calls": [], "done": True} + + +# Default: mock. Override with getenv or DI for production. +_llm_service: LLMService | None = None + + +def get_llm_service() -> LLMService: + global _llm_service + if _llm_service is None: + _llm_service = MockLLMService() + return _llm_service + + +def set_llm_service(service: LLMService) -> None: + global _llm_service + _llm_service = service diff --git a/sdks/guides/python/ai_agents/mock_agent.py b/sdks/guides/python/ai_agents/mock_agent.py new file mode 100644 index 0000000000..9453dd5bd2 --- /dev/null +++ b/sdks/guides/python/ai_agents/mock_agent.py @@ -0,0 +1,24 @@ +"""Mock LLM and tools - no external API dependencies.""" + +_call_count: dict[str, int] = {} + + +def call_llm(messages: list[dict]) -> dict: + """Mock LLM: first call returns tool_calls, second returns final answer.""" + key = "default" + _call_count[key] = _call_count.get(key, 0) + 1 + if _call_count[key] == 1: + return { + "content": "", + "tool_calls": [{"name": "get_weather", "args": {"location": "SF"}}], + "done": False, + } + return {"content": "It's 72°F and sunny in SF.", "tool_calls": [], "done": True} + + +def run_tool(name: str, args: 
dict) -> str: + """Mock tool execution - returns canned results.""" + if name == "get_weather": + loc = args.get("location", "unknown") + return f"Weather in {loc}: 72°F, sunny" + return f"Unknown tool: {name}" diff --git a/sdks/guides/python/ai_agents/tool_service.py b/sdks/guides/python/ai_agents/tool_service.py new file mode 100644 index 0000000000..5b228d09dc --- /dev/null +++ b/sdks/guides/python/ai_agents/tool_service.py @@ -0,0 +1,40 @@ +"""Encapsulated tool execution - swap MockToolService for real APIs in production. + +See docs: /guides/ai-agents +""" + +from abc import ABC, abstractmethod + + +class ToolService(ABC): + """Interface for agent tool execution. Implement with your APIs.""" + + @abstractmethod + def run(self, name: str, args: dict) -> str: + """Execute a tool. Returns string result.""" + pass + + +class MockToolService(ToolService): + """No external API - returns canned results for demos.""" + + def run(self, name: str, args: dict) -> str: + if name == "get_weather": + loc = args.get("location", "unknown") + return f"Weather in {loc}: 72°F, sunny" + return f"Unknown tool: {name}" + + +_tool_service: ToolService | None = None + + +def get_tool_service() -> ToolService: + global _tool_service + if _tool_service is None: + _tool_service = MockToolService() + return _tool_service + + +def set_tool_service(service: ToolService) -> None: + global _tool_service + _tool_service = service diff --git a/sdks/guides/python/ai_agents/worker.py b/sdks/guides/python/ai_agents/worker.py new file mode 100644 index 0000000000..eebfe83415 --- /dev/null +++ b/sdks/guides/python/ai_agents/worker.py @@ -0,0 +1,87 @@ +from hatchet_sdk import ( + ConcurrencyExpression, + ConcurrencyLimitStrategy, + DurableContext, + EmptyModel, + Hatchet, +) + +try: + from .llm_service import get_llm_service + from .tool_service import get_tool_service +except ImportError: + from llm_service import get_llm_service + from tool_service import get_tool_service + +hatchet = 
Hatchet(debug=True) + + +# > Step 01 Define Agent Task +@hatchet.durable_task( + name="ReasoningLoopAgent", + concurrency=ConcurrencyExpression( + expression="input.session_id != null ? string(input.session_id) : 'constant'", + max_runs=1, + limit_strategy=ConcurrencyLimitStrategy.CANCEL_IN_PROGRESS, + ), +) +async def agent_task(input: EmptyModel, ctx: DurableContext) -> dict: + """Agent loop: reason, act, observe. Streams output, survives restarts.""" + query = "Hello" + if isinstance(input, dict) and input.get("query"): + query = str(input["query"]) + elif hasattr(input, "query") and input.query: + query = str(input.query) + return await agent_reasoning_loop(query) +# !! + + +# > Step 02 Reasoning Loop +async def agent_reasoning_loop(query: str) -> dict: + llm = get_llm_service() + tools = get_tool_service() + messages = [{"role": "user", "content": query}] + for _ in range(10): + resp = llm.complete(messages) + if resp.get("done"): + return {"response": resp["content"]} + for tc in resp.get("tool_calls", []): + result = tools.run(tc["name"], tc.get("args", {})) + messages.append({"role": "tool", "content": result}) + return {"response": "Max iterations reached"} +# !! + + +# > Step 03 Stream Response +@hatchet.durable_task( + name="StreamingAgentTask", + concurrency=ConcurrencyExpression( + expression="input.session_id != null ? string(input.session_id) : 'constant'", + max_runs=1, + limit_strategy=ConcurrencyLimitStrategy.CANCEL_IN_PROGRESS, + ), +) +async def streaming_agent(input: EmptyModel, ctx: DurableContext) -> dict: + """Stream tokens to the client as they're generated.""" + tokens = ["Hello", " ", "world", "!"] + for t in tokens: + await ctx.aio_put_stream(t) + return {"done": True} + + +# !! + + +def main() -> None: + # > Step 04 Run Worker + worker = hatchet.worker( + "agent-worker", + workflows=[agent_task, streaming_agent], + slots=5, + ) + worker.start() + # !! 
+ + +if __name__ == "__main__": + main() diff --git a/sdks/guides/python/batch_processing/worker.py b/sdks/guides/python/batch_processing/worker.py new file mode 100644 index 0000000000..d153f35957 --- /dev/null +++ b/sdks/guides/python/batch_processing/worker.py @@ -0,0 +1,56 @@ +from typing import Any + +from hatchet_sdk import Context, Hatchet +from pydantic import BaseModel + +hatchet = Hatchet(debug=True) + + +# > Step 01 Define Parent Task +class BatchInput(BaseModel): + items: list[str] + + +class ItemInput(BaseModel): + item_id: str + + +parent_wf = hatchet.workflow(name="BatchParent", input_validator=BatchInput) +child_wf = hatchet.workflow(name="BatchChild", input_validator=ItemInput) + + +@parent_wf.durable_task() +async def spawn_children(input: BatchInput, ctx: Context) -> dict[str, Any]: + """Parent fans out to one child per item.""" + results = await child_wf.aio_run_many( + [child_wf.create_bulk_run_item(input=ItemInput(item_id=item_id)) for item_id in input.items] + ) + return {"processed": len(results), "results": results} + + +# !! + + +# > Step 03 Process Item +@child_wf.task() +async def process_item(input: ItemInput, ctx: Context) -> dict[str, str]: + """Child processes a single item.""" + return {"status": "done", "item_id": input.item_id} + + +# !! + + +def main() -> None: + # > Step 04 Run Worker + worker = hatchet.worker( + "batch-worker", + slots=20, + workflows=[parent_wf, child_wf], + ) + worker.start() + # !! + + +if __name__ == "__main__": + main() diff --git a/sdks/guides/python/document_processing/llm_extract_service.py b/sdks/guides/python/document_processing/llm_extract_service.py new file mode 100644 index 0000000000..218ce5e3b4 --- /dev/null +++ b/sdks/guides/python/document_processing/llm_extract_service.py @@ -0,0 +1,37 @@ +"""Encapsulated LLM extraction - swap MockExtractService for OpenAI/Anthropic in production. 
+ +See docs: /guides/document-processing +""" + +from abc import ABC, abstractmethod + + +class ExtractService(ABC): + """Interface for entity extraction from text. Implement with LLM or rules.""" + + @abstractmethod + def extract(self, text: str) -> list[str]: + """Extract entities from parsed text.""" + pass + + +class MockExtractService(ExtractService): + """No external API - returns placeholder entities for demos.""" + + def extract(self, text: str) -> list[str]: + return ["entity1", "entity2"] + + +_extract_service: ExtractService | None = None + + +def get_extract_service() -> ExtractService: + global _extract_service + if _extract_service is None: + _extract_service = MockExtractService() + return _extract_service + + +def set_extract_service(service: ExtractService) -> None: + global _extract_service + _extract_service = service diff --git a/sdks/guides/python/document_processing/mock_ocr.py b/sdks/guides/python/document_processing/mock_ocr.py new file mode 100644 index 0000000000..992157aadd --- /dev/null +++ b/sdks/guides/python/document_processing/mock_ocr.py @@ -0,0 +1,6 @@ +"""Mock OCR/parser - no external dependencies.""" + + +def parse_document(content: bytes) -> str: + """Mock: return placeholder text instead of real OCR.""" + return f"Parsed text from {len(content)} bytes" diff --git a/sdks/guides/python/document_processing/ocr_service.py b/sdks/guides/python/document_processing/ocr_service.py new file mode 100644 index 0000000000..ace102dbd7 --- /dev/null +++ b/sdks/guides/python/document_processing/ocr_service.py @@ -0,0 +1,37 @@ +"""Encapsulated OCR service - swap MockOCRService for Tesseract/Google Vision in production. + +See docs: /guides/document-processing +""" + +from abc import ABC, abstractmethod + + +class OCRService(ABC): + """Interface for document parsing. 
Implement with Tesseract, Google Vision, etc.""" + + @abstractmethod + def parse(self, content: bytes) -> str: + """Convert raw bytes (image/PDF) to text.""" + pass + + +class MockOCRService(OCRService): + """No external API - returns placeholder for demos.""" + + def parse(self, content: bytes) -> str: + return f"Parsed text from {len(content)} bytes" + + +_ocr_service: OCRService | None = None + + +def get_ocr_service() -> OCRService: + global _ocr_service + if _ocr_service is None: + _ocr_service = MockOCRService() + return _ocr_service + + +def set_ocr_service(service: OCRService) -> None: + global _ocr_service + _ocr_service = service diff --git a/sdks/guides/python/document_processing/worker.py b/sdks/guides/python/document_processing/worker.py new file mode 100644 index 0000000000..93477c9f17 --- /dev/null +++ b/sdks/guides/python/document_processing/worker.py @@ -0,0 +1,66 @@ +from typing import Any + +from hatchet_sdk import Context, Hatchet +from pydantic import BaseModel + +try: + from .llm_extract_service import get_extract_service + from .ocr_service import get_ocr_service +except ImportError: + from llm_extract_service import get_extract_service + from ocr_service import get_ocr_service + +hatchet = Hatchet(debug=True) + + +# > Step 01 Define DAG +class DocInput(BaseModel): + doc_id: str + content: bytes = b"" + + +doc_wf = hatchet.workflow(name="DocumentPipeline", input_validator=DocInput) + + +@doc_wf.task() +async def ingest(input: DocInput, ctx: Context) -> dict[str, Any]: + return {"doc_id": input.doc_id, "content": input.content} + + +# !! + + +# > Step 02 Parse Stage +@doc_wf.task(parents=[ingest]) +async def parse(input: DocInput, ctx: Context) -> dict[str, Any]: + ingested = ctx.task_output(ingest) + text = get_ocr_service().parse(ingested["content"]) + return {"doc_id": input.doc_id, "text": text} + + +# !! 
+ + +# > Step 03 Extract Stage +@doc_wf.task(parents=[parse]) +async def extract(input: DocInput, ctx: Context) -> dict[str, Any]: + parsed = ctx.task_output(parse) + entities = get_extract_service().extract(parsed["text"]) + return {"doc_id": parsed["doc_id"], "entities": entities} + + +# !! + + +def main() -> None: + # > Step 04 Run Worker + worker = hatchet.worker( + "document-worker", + workflows=[doc_wf], + ) + worker.start() + # !! + + +if __name__ == "__main__": + main() diff --git a/sdks/guides/python/evaluator_optimizer/mock_llm.py b/sdks/guides/python/evaluator_optimizer/mock_llm.py new file mode 100644 index 0000000000..236ce6e5fb --- /dev/null +++ b/sdks/guides/python/evaluator_optimizer/mock_llm.py @@ -0,0 +1,17 @@ +"""Mock LLM for evaluator-optimizer - no external API dependencies.""" + +_generate_count = 0 + + +def mock_generate(prompt: str) -> str: + global _generate_count + _generate_count += 1 + if _generate_count == 1: + return "Check out our product! Buy now!" + return "Discover how our tool saves teams 10 hours/week. Try it free." + + +def mock_evaluate(draft: str) -> dict: + if len(draft) < 40: + return {"score": 0.4, "feedback": "Too short and pushy. 
Add a specific benefit and soften the CTA."} + return {"score": 0.9, "feedback": "Clear value prop, appropriate tone."} diff --git a/sdks/guides/python/evaluator_optimizer/worker.py b/sdks/guides/python/evaluator_optimizer/worker.py new file mode 100644 index 0000000000..baef0015c8 --- /dev/null +++ b/sdks/guides/python/evaluator_optimizer/worker.py @@ -0,0 +1,74 @@ +from hatchet_sdk import Context, DurableContext, EmptyModel, Hatchet + +try: + from .mock_llm import mock_evaluate, mock_generate +except ImportError: + from mock_llm import mock_evaluate, mock_generate + +hatchet = Hatchet(debug=True) + +generator_wf = hatchet.workflow(name="GenerateDraft") +evaluator_wf = hatchet.workflow(name="EvaluateDraft") + + +# > Step 01 Define Tasks +@generator_wf.task() +async def generate_draft(input: dict, ctx: Context) -> dict: + prompt = ( + f"Improve this draft.\n\nDraft: {input['previous_draft']}\nFeedback: {input['feedback']}" + if input.get("feedback") + else f"Write a social media post about \"{input['topic']}\" for {input['audience']}. Under 100 words." + ) + return {"draft": mock_generate(prompt)} + + +@evaluator_wf.task() +async def evaluate_draft(input: dict, ctx: Context) -> dict: + return mock_evaluate(input["draft"]) +# !! 
+
+
+# > Step 02 Optimization Loop
+@hatchet.durable_task(name="EvaluatorOptimizer", execution_timeout="5m")
+async def evaluator_optimizer(input: EmptyModel, ctx: DurableContext) -> dict:
+    max_iterations = 3
+    threshold = 0.8
+    draft = ""
+    feedback = ""
+
+    for i in range(max_iterations):
+        generated = await generator_wf.aio_run(
+            input={
+                "topic": getattr(input, "topic", ""),  # EmptyModel is not subscriptable; extras arrive as attributes
+                "audience": getattr(input, "audience", ""),
+                "previous_draft": draft or None,
+                "feedback": feedback or None,
+            }
+        )
+        draft = generated["generate_draft"]["draft"]  # aio_run keys results by task name
+
+        evaluation = await evaluator_wf.aio_run(
+            input={"draft": draft, "topic": getattr(input, "topic", ""), "audience": getattr(input, "audience", "")}
+        )
+
+        if evaluation["evaluate_draft"]["score"] >= threshold:
+            return {"draft": draft, "iterations": i + 1, "score": evaluation["evaluate_draft"]["score"]}
+        feedback = evaluation["evaluate_draft"]["feedback"]
+
+    return {"draft": draft, "iterations": max_iterations, "score": -1}
+# !!
+
+
+def main() -> None:
+    # > Step 03 Run Worker
+    worker = hatchet.worker(
+        "evaluator-optimizer-worker",
+        workflows=[generator_wf, evaluator_wf, evaluator_optimizer],
+        slots=5,
+    )
+    worker.start()
+    # !!
+
+
+if __name__ == "__main__":
+    main()
diff --git a/sdks/guides/python/event_driven/trigger.py b/sdks/guides/python/event_driven/trigger.py
new file mode 100644
index 0000000000..b3c5c518e8
--- /dev/null
+++ b/sdks/guides/python/event_driven/trigger.py
@@ -0,0 +1,12 @@
+from hatchet_sdk import Hatchet
+
+hatchet = Hatchet(debug=True)
+
+
+# > Step 03 Push Event
+# Push an event to trigger the workflow. Use the same key as on_events.
+hatchet.event.push(
+    "order:created",
+    {"message": "Order #1234", "source": "webhook"},
+)
+# !!
diff --git a/sdks/guides/python/event_driven/worker.py b/sdks/guides/python/event_driven/worker.py new file mode 100644 index 0000000000..7e406076f9 --- /dev/null +++ b/sdks/guides/python/event_driven/worker.py @@ -0,0 +1,46 @@ +from hatchet_sdk import Context, Hatchet +from pydantic import BaseModel + +hatchet = Hatchet(debug=True) + + +# > Step 01 Define Event Task +class EventInput(BaseModel): + message: str + source: str = "api" + + +event_wf = hatchet.workflow( + name="EventDrivenWorkflow", + input_validator=EventInput, + on_events=["order:created", "user:signup"], +) + + +@event_wf.task() +async def process_event(input: EventInput, ctx: Context) -> dict: + return {"processed": input.message, "source": input.source} + + +# !! + + +# > Step 02 Register Event Trigger +def push_order_event(): + """Push an event to trigger the workflow. Use the same key as on_events.""" + hatchet.event.push("order:created", {"message": "Order #1234", "source": "webhook"}) +# !! + + +def main() -> None: + # > Step 04 Run Worker + worker = hatchet.worker( + "event-driven-worker", + workflows=[event_wf], + ) + worker.start() + # !! + + +if __name__ == "__main__": + main() diff --git a/sdks/guides/python/human_in_the_loop/trigger.py b/sdks/guides/python/human_in_the_loop/trigger.py new file mode 100644 index 0000000000..d4a6c573a2 --- /dev/null +++ b/sdks/guides/python/human_in_the_loop/trigger.py @@ -0,0 +1,17 @@ +from hatchet_sdk import Hatchet + +hatchet = Hatchet(debug=True) + + +# > Step 03 Push Approval Event +# Include the run_id so the event matches the specific task waiting for it. +def push_approval(run_id: str, approved: bool, reason: str = "") -> None: + hatchet.event.push( + "approval:decision", + {"runId": run_id, "approved": approved, "reason": reason}, + ) + + +# Approve: push_approval("run-id-from-ui", True) +# Reject: push_approval("run-id-from-ui", False, reason="needs review") +# !! 
diff --git a/sdks/guides/python/human_in_the_loop/worker.py b/sdks/guides/python/human_in_the_loop/worker.py new file mode 100644 index 0000000000..2fe2576847 --- /dev/null +++ b/sdks/guides/python/human_in_the_loop/worker.py @@ -0,0 +1,44 @@ +from hatchet_sdk import DurableContext, EmptyModel, Hatchet, UserEventCondition + +hatchet = Hatchet(debug=True) + +APPROVAL_EVENT_KEY = "approval:decision" + + +# > Step 02 Wait For Event +async def wait_for_approval(ctx: DurableContext) -> dict: + run_id = ctx.workflow_run_id + approval = await ctx.aio_wait_for( + "approval", + UserEventCondition( + event_key=APPROVAL_EVENT_KEY, + expression=f"input.runId == '{run_id}'", + ), + ) + return approval +# !! + + +# > Step 01 Define Approval Task +@hatchet.durable_task(name="ApprovalTask") +async def approval_task(input: EmptyModel, ctx: DurableContext) -> dict: + proposed_action = {"action": "send_email", "to": "user@example.com"} + approval = await wait_for_approval(ctx) + if approval.get("approved"): + return {"status": "approved", "action": proposed_action} + return {"status": "rejected", "reason": approval.get("reason", "")} +# !! + + +def main() -> None: + # > Step 04 Run Worker + worker = hatchet.worker( + "human-in-the-loop-worker", + workflows=[approval_task], + ) + worker.start() + # !! + + +if __name__ == "__main__": + main() diff --git a/sdks/guides/python/integrations/embedding_cohere.py b/sdks/guides/python/integrations/embedding_cohere.py new file mode 100644 index 0000000000..7474e25a81 --- /dev/null +++ b/sdks/guides/python/integrations/embedding_cohere.py @@ -0,0 +1,13 @@ +# Third-party integration example - requires: pip install cohere +# See: /guides/rag-and-indexing + +import cohere + +client = cohere.Client() + + +# > Cohere embedding usage +def embed(text: str) -> list[float]: + r = client.embed(texts=[text], model="embed-english-v3.0", input_type="search_document") + return list(r.embeddings[0]) +# !! 
diff --git a/sdks/guides/python/integrations/embedding_openai.py b/sdks/guides/python/integrations/embedding_openai.py new file mode 100644 index 0000000000..ee7aaa7545 --- /dev/null +++ b/sdks/guides/python/integrations/embedding_openai.py @@ -0,0 +1,13 @@ +# Third-party integration example - requires: pip install openai +# See: /guides/rag-and-indexing + +from openai import OpenAI + +client = OpenAI() + + +# > OpenAI embedding usage +def embed(text: str) -> list[float]: + r = client.embeddings.create(model="text-embedding-3-small", input=text) + return r.data[0].embedding +# !! diff --git a/sdks/guides/python/integrations/llm_anthropic.py b/sdks/guides/python/integrations/llm_anthropic.py new file mode 100644 index 0000000000..93c1352239 --- /dev/null +++ b/sdks/guides/python/integrations/llm_anthropic.py @@ -0,0 +1,22 @@ +# Third-party integration example - requires: pip install anthropic +# See: /guides/ai-agents + +from anthropic import Anthropic + +client = Anthropic() + + +# > Anthropic usage +def complete(messages: list[dict]) -> dict: + resp = client.messages.create( + model="claude-3-5-haiku-20241022", + max_tokens=1024, + messages=[{"role": m["role"], "content": m["content"]} for m in messages], + tools=[{"name": "get_weather", "description": "Get weather", "input_schema": {"type": "object", "properties": {"location": {"type": "string"}}}}], + ) + for block in resp.content: + if block.type == "tool_use": + return {"content": "", "tool_calls": [{"name": block.name, "args": block.input}], "done": False} + text = "".join(b.text for b in resp.content if hasattr(b, "text")) + return {"content": text, "tool_calls": [], "done": True} +# !! 
diff --git a/sdks/guides/python/integrations/llm_groq.py b/sdks/guides/python/integrations/llm_groq.py new file mode 100644 index 0000000000..5f77a56492 --- /dev/null +++ b/sdks/guides/python/integrations/llm_groq.py @@ -0,0 +1,38 @@ +# Third-party integration - requires: pip install groq +# See: /guides/ai-agents + +import json + +from groq import Groq + +client = Groq() + + +# > Groq usage +def complete(messages: list[dict]) -> dict: + r = client.chat.completions.create( + model="llama-3.3-70b-versatile", + messages=messages, + tool_choice="auto", + tools=[ + { + "type": "function", + "function": { + "name": "get_weather", + "description": "Get weather for a location", + "parameters": { + "type": "object", + "properties": {"location": {"type": "string"}}, + "required": ["location"], + }, + }, + } + ], + ) + msg = r.choices[0].message + tool_calls = [ + {"name": tc.function.name, "args": json.loads(tc.function.arguments or "{}")} + for tc in (msg.tool_calls or []) + ] + return {"content": msg.content or "", "tool_calls": tool_calls, "done": not tool_calls} +# !! diff --git a/sdks/guides/python/integrations/llm_ollama.py b/sdks/guides/python/integrations/llm_ollama.py new file mode 100644 index 0000000000..8b5db299b6 --- /dev/null +++ b/sdks/guides/python/integrations/llm_ollama.py @@ -0,0 +1,13 @@ +# Third-party integration example - requires: pip install ollama; ollama run llama2 +# See: /guides/ai-agents + +import ollama + + +# > Ollama usage +def complete(messages: list[dict]) -> dict: + resp = ollama.chat(model="llama2", messages=messages) + content = resp.get("message", {}).get("content", "") + tool_calls = resp.get("message", {}).get("tool_calls") or [] + return {"content": content, "tool_calls": tool_calls, "done": not tool_calls} +# !! 
diff --git a/sdks/guides/python/integrations/llm_openai.py b/sdks/guides/python/integrations/llm_openai.py new file mode 100644 index 0000000000..d7499a367e --- /dev/null +++ b/sdks/guides/python/integrations/llm_openai.py @@ -0,0 +1,38 @@ +# Third-party integration example - requires: pip install openai +# See: /guides/ai-agents + +import json + +from openai import OpenAI + +client = OpenAI() + + +# > OpenAI usage +def complete(messages: list[dict]) -> dict: + r = client.chat.completions.create( + model="gpt-4o-mini", + messages=messages, + tool_choice="auto", + tools=[ + { + "type": "function", + "function": { + "name": "get_weather", + "description": "Get weather for a location", + "parameters": { + "type": "object", + "properties": {"location": {"type": "string"}}, + "required": ["location"], + }, + }, + } + ], + ) + msg = r.choices[0].message + tool_calls = [ + {"name": tc.function.name, "args": json.loads(tc.function.arguments or "{}")} + for tc in (msg.tool_calls or []) + ] + return {"content": msg.content or "", "tool_calls": tool_calls, "done": not tool_calls} +# !! diff --git a/sdks/guides/python/integrations/ocr_google_vision.py b/sdks/guides/python/integrations/ocr_google_vision.py new file mode 100644 index 0000000000..fbe69aab6a --- /dev/null +++ b/sdks/guides/python/integrations/ocr_google_vision.py @@ -0,0 +1,14 @@ +# Third-party integration example - requires: pip install google-cloud-vision +# See: /guides/document-processing + +from google.cloud import vision + +client = vision.ImageAnnotatorClient() + + +# > Google Vision usage +def parse_document(content: bytes) -> str: + image = vision.Image(content=content) + response = client.document_text_detection(image=image) + return response.full_text_annotation.text if response.full_text_annotation else "" +# !! 
diff --git a/sdks/guides/python/integrations/ocr_reducto.py b/sdks/guides/python/integrations/ocr_reducto.py new file mode 100644 index 0000000000..d053343d6e --- /dev/null +++ b/sdks/guides/python/integrations/ocr_reducto.py @@ -0,0 +1,15 @@ +# Third-party integration - requires: pip install reductoai +# See: /guides/document-processing +# Reducto: parse PDFs/images to structured content, extract with schema/prompt + +from reducto import Reducto + +client = Reducto() + + +# > Reducto usage +def parse_document(content: bytes) -> str: + upload = client.upload.upload(file=content, extension=".pdf") + result = client.parse.parse(input=upload.url) + return str(result) # or access result.blocks, result.tables, etc. +# !! diff --git a/sdks/guides/python/integrations/ocr_tesseract.py b/sdks/guides/python/integrations/ocr_tesseract.py new file mode 100644 index 0000000000..a6645ff5a5 --- /dev/null +++ b/sdks/guides/python/integrations/ocr_tesseract.py @@ -0,0 +1,14 @@ +# Third-party integration example - requires: pip install pytesseract; install Tesseract binary +# See: /guides/document-processing + +import io + +import pytesseract +from PIL import Image + + +# > Tesseract usage +def parse_document(content: bytes) -> str: + img = Image.open(io.BytesIO(content)) + return pytesseract.image_to_string(img) +# !! diff --git a/sdks/guides/python/integrations/ocr_unstructured.py b/sdks/guides/python/integrations/ocr_unstructured.py new file mode 100644 index 0000000000..a685245fa7 --- /dev/null +++ b/sdks/guides/python/integrations/ocr_unstructured.py @@ -0,0 +1,12 @@ +# Third-party integration - requires: pip install "unstructured[pdf]" +# See: /guides/document-processing +# Unstructured: open-source doc parsing for RAG, supports PDF, DOCX, images, etc. 
import io

from unstructured.partition.auto import partition


# > Unstructured usage
def parse_document(content: bytes) -> str:
    """Partition a document's raw bytes and join its elements as plain text."""
    elements = partition(file=io.BytesIO(content))
    parts = [str(element) for element in elements]
    return "\n\n".join(parts)
# !!


# ==== sdks/guides/python/integrations/scraper_browserbase.py ====
# Third-party integration example - requires: pip install playwright browserbase
# See: /guides/web-scraping

import os

from browserbase import Browserbase
from playwright.async_api import async_playwright

bb = Browserbase(api_key=os.environ["BROWSERBASE_API_KEY"])


# > Browserbase usage
async def scrape_url(url: str) -> dict:
    """Fetch a page's HTML through a remote Browserbase browser session."""
    session = bb.sessions.create(project_id=os.environ["BROWSERBASE_PROJECT_ID"])
    async with async_playwright() as pw:
        browser = await pw.chromium.connect_over_cdp(session.connect_url)
        # NOTE(review): assumes the remote session always exposes a default
        # context with one open page - confirm against Browserbase docs.
        page = browser.contexts[0].pages[0]
        await page.goto(url)
        html = await page.content()
        await browser.close()
    return {"url": url, "content": html}
# !!


# ==== sdks/guides/python/integrations/scraper_firecrawl.py ====
# Third-party integration example - requires: pip install firecrawl-py
# See: /guides/web-scraping

import os

from firecrawl import FirecrawlApp

firecrawl = FirecrawlApp(api_key=os.environ["FIRECRAWL_API_KEY"])


# > Firecrawl usage
def scrape_url(url: str) -> dict:
    """Scrape a URL as markdown via Firecrawl."""
    result = firecrawl.scrape_url(url, params={"formats": ["markdown"]})
    metadata = result.get("metadata", {})
    return {"url": url, "content": result["markdown"], "metadata": metadata}
# !!
# ==== sdks/guides/python/integrations/scraper_openai.py ====
# Third-party integration example - requires: pip install openai
# See: /guides/web-scraping

from openai import OpenAI

client = OpenAI()


# > OpenAI web search usage
def search_and_extract(query: str) -> dict:
    """Answer a query using OpenAI's hosted web_search tool."""
    response = client.responses.create(
        model="gpt-4o-mini",
        tools=[{"type": "web_search"}],
        input=query,
    )
    return {"query": query, "content": response.output_text}
# !!


# ==== sdks/guides/python/integrations/scraper_playwright.py ====
# Third-party integration example - requires: pip install playwright && playwright install
# See: /guides/web-scraping

from playwright.async_api import async_playwright


# > Playwright usage
async def scrape_url(url: str) -> dict:
    """Scrape a page with headless Chromium and return its full HTML."""
    async with async_playwright() as pw:
        browser = await pw.chromium.launch(headless=True)
        page = await browser.new_page()
        await page.goto(url)
        html = await page.content()
        await browser.close()
    return {"url": url, "content": html}
# !!


# ==== sdks/guides/python/llm_pipelines/llm_service.py ====
"""Encapsulated LLM service - swap MockLLMService for OpenAI/Anthropic in production.

See docs: /guides/llm-pipelines
"""

from abc import ABC, abstractmethod


class LLMService(ABC):
    """Interface for LLM generation. Implement with OpenAI, Anthropic, etc."""

    @abstractmethod
    def generate(self, prompt: str) -> dict:
        """Generate from prompt. Returns {content, valid}."""
        ...


class MockLLMService(LLMService):
    """No external API - for demos."""

    def generate(self, prompt: str) -> dict:
        # Echo a truncated preview of the prompt instead of calling a model.
        preview = prompt[:50]
        return {"content": f"Generated for: {preview}...", "valid": True}


# Process-wide service instance, lazily created (single-threaded demo use).
_llm_service: LLMService | None = None


def get_llm_service() -> LLMService:
    """Return the process-wide LLM service, creating the mock on first use."""
    global _llm_service
    if _llm_service is None:
        _llm_service = MockLLMService()
    return _llm_service


def set_llm_service(service: LLMService) -> None:
    """Override the process-wide LLM service (e.g. with a real provider)."""
    global _llm_service
    _llm_service = service


# ==== sdks/guides/python/llm_pipelines/mock_llm.py ====
"""Mock LLM client - no external API dependencies."""


def generate(prompt: str) -> dict:
    """Mock: return placeholder instead of calling real LLM."""
    preview = prompt[:50]
    return {"content": f"Generated for: {preview}...", "valid": True}


def validate(output: dict) -> bool:
    """Mock: always valid."""
    return output.get("valid", False)


# ==== sdks/guides/python/llm_pipelines/worker.py (part 1) ====
from hatchet_sdk import Context, Hatchet
from pydantic import BaseModel

# Support both package-style and script-style execution of the guide code.
try:
    from .llm_service import get_llm_service
except ImportError:
    from llm_service import get_llm_service

hatchet = Hatchet(debug=True)


# > Step 01 Define Pipeline
class PipelineInput(BaseModel):
    prompt: str


llm_wf = hatchet.workflow(name="LLMPipeline", input_validator=PipelineInput)


@llm_wf.task()
async def prompt_task(input: PipelineInput, ctx: Context) -> dict:
    return {"prompt": input.prompt}


# !!
# ==== sdks/guides/python/llm_pipelines/worker.py (continued) ====


# > Step 02 Prompt Task
def _build_prompt(user_input: str, context: str = "") -> str:
    """Compose the LLM prompt; context is appended only when provided."""
    prompt = f"Process the following: {user_input}"
    if context:
        prompt += f"\nContext: {context}"
    return prompt
# !!


# > Step 03 Validate Task
@llm_wf.task(parents=[prompt_task])
async def generate_task(input: PipelineInput, ctx: Context) -> dict:
    """Generate from the upstream prompt and fail the run when invalid."""
    prev = ctx.task_output(prompt_task)
    output = get_llm_service().generate(prev["prompt"])
    if not output.get("valid"):
        raise ValueError("Validation failed")
    return output


# !!


def main() -> None:
    # > Step 04 Run Worker
    worker = hatchet.worker(
        "llm-pipeline-worker",
        workflows=[llm_wf],
    )
    worker.start()
    # !!


if __name__ == "__main__":
    main()


# ==== sdks/guides/python/multi_agent/mock_llm.py ====
"""Mock LLM for multi-agent orchestration - no external API dependencies."""

# Call counter drives the scripted orchestrator turns; demo-only, not thread-safe.
_orchestrator_call_count = 0


def mock_orchestrator_llm(messages: list[dict]) -> dict:
    """Scripted orchestrator: research first, then writing, then done."""
    global _orchestrator_call_count
    _orchestrator_call_count += 1
    if _orchestrator_call_count == 1:
        return {
            "done": False,
            "content": "",
            "tool_call": {"name": "research", "args": {"task": "Find key facts about the topic"}},
        }
    if _orchestrator_call_count == 2:
        return {
            "done": False,
            "content": "",
            "tool_call": {"name": "writing", "args": {"task": "Write a summary from the research"}},
        }
    return {"done": True, "content": "Here is the final report combining research and writing."}


def mock_specialist_llm(task: str, role: str) -> str:
    """Mock specialist reply tagged with its role."""
    return f"[{role}] Completed: {task}"


# ==== sdks/guides/python/multi_agent/worker.py ====
from hatchet_sdk import DurableContext, Hatchet
from pydantic import BaseModel

# Support both package-style and script-style execution of the guide code.
try:
    from .mock_llm import mock_orchestrator_llm, mock_specialist_llm
except ImportError:
    from mock_llm import mock_orchestrator_llm, mock_specialist_llm

hatchet = Hatchet(debug=True)


# FIX: these tasks previously annotated `input: EmptyModel` and then
# subscripted it (input["task"], input["goal"]). Pydantic models are not
# subscriptable, so every run would raise TypeError. Typed input models
# follow the `input_validator` pattern the llm_pipelines guide uses.
class SpecialistInput(BaseModel):
    task: str
    context: str = ""


class OrchestratorInput(BaseModel):
    goal: str


# > Step 01 Specialist Agents
@hatchet.durable_task(
    name="ResearchSpecialist", execution_timeout="3m", input_validator=SpecialistInput
)
async def research(input: SpecialistInput, ctx: DurableContext) -> dict:
    return {"result": mock_specialist_llm(input.task, "research")}


@hatchet.durable_task(
    name="WritingSpecialist", execution_timeout="2m", input_validator=SpecialistInput
)
async def write(input: SpecialistInput, ctx: DurableContext) -> dict:
    return {"result": mock_specialist_llm(input.task, "writing")}


@hatchet.durable_task(
    name="CodeSpecialist", execution_timeout="2m", input_validator=SpecialistInput
)
async def code(input: SpecialistInput, ctx: DurableContext) -> dict:
    return {"result": mock_specialist_llm(input.task, "code")}
# !!


specialists = {
    "research": research,
    "writing": write,
    "code": code,
}


# > Step 02 Orchestrator Loop
@hatchet.durable_task(
    name="MultiAgentOrchestrator", execution_timeout="15m", input_validator=OrchestratorInput
)
async def orchestrator(input: OrchestratorInput, ctx: DurableContext) -> dict:
    """LLM-driven loop: pick a specialist, run it durably, feed back the result."""
    messages = [{"role": "user", "content": input.goal}]

    for _ in range(10):  # hard cap so a confused model cannot loop forever
        response = mock_orchestrator_llm(messages)

        if response["done"]:
            return {"result": response["content"]}

        name = response["tool_call"]["name"]
        specialist = specialists.get(name)
        if not specialist:
            raise ValueError(f"Unknown specialist: {name}")

        result = await specialist.aio_run(
            input=SpecialistInput(
                task=response["tool_call"]["args"]["task"],
                context="\n".join(m["content"] for m in messages),
            )
        )

        messages.append({"role": "assistant", "content": f"Called {name}"})
        messages.append({"role": "tool", "content": result["result"]})

    return {"result": "Max iterations reached"}
# !!
# ==== sdks/guides/python/multi_agent/worker.py (continued) ====


def main() -> None:
    # > Step 03 Run Worker
    worker = hatchet.worker(
        "multi-agent-worker",
        workflows=[research, write, code, orchestrator],
        slots=10,
    )
    worker.start()
    # !!


if __name__ == "__main__":
    main()


# ==== sdks/guides/python/parallelization/mock_llm.py ====
"""Mock LLM for parallelization - no external API dependencies."""


def mock_generate_content(message: str) -> str:
    """Mock content generation: echo the message."""
    return f"Here is a helpful response to: {message}"


def mock_safety_check(message: str) -> dict:
    """Flag any message containing 'unsafe' (case-insensitive)."""
    if "unsafe" in message.lower():
        return {"safe": False, "reason": "Content flagged as potentially unsafe."}
    return {"safe": True, "reason": "Content is appropriate."}


def mock_evaluate(content: str) -> dict:
    """Score by length; approve at score >= 0.7."""
    score = 0.85 if len(content) > 20 else 0.3
    return {"score": score, "approved": score >= 0.7}


# ==== sdks/guides/python/parallelization/worker.py ====
import asyncio

from hatchet_sdk import Context, DurableContext, Hatchet
from pydantic import BaseModel

# Support both package-style and script-style execution of the guide code.
try:
    from .mock_llm import mock_evaluate, mock_generate_content, mock_safety_check
except ImportError:
    from mock_llm import mock_evaluate, mock_generate_content, mock_safety_check

hatchet = Hatchet(debug=True)


# FIX: tasks previously subscripted pydantic model inputs (input["message"]),
# which raises TypeError at runtime. Typed input models follow the
# `input_validator` pattern the llm_pipelines guide uses.
class MessageInput(BaseModel):
    message: str


class ContentInput(BaseModel):
    content: str


content_wf = hatchet.workflow(name="GenerateContent", input_validator=MessageInput)
safety_wf = hatchet.workflow(name="SafetyCheck", input_validator=MessageInput)
evaluator_wf = hatchet.workflow(name="EvaluateContent", input_validator=ContentInput)


# > Step 01 Parallel Tasks
@content_wf.task()
async def generate_content(input: MessageInput, ctx: Context) -> dict:
    return {"content": mock_generate_content(input.message)}


@safety_wf.task()
async def safety_check(input: MessageInput, ctx: Context) -> dict:
    return mock_safety_check(input.message)


@evaluator_wf.task()
async def evaluate_content(input: ContentInput, ctx: Context) -> dict:
    return mock_evaluate(input.content)
# !!


# > Step 02 Sectioning
@hatchet.durable_task(
    name="ParallelSectioning", execution_timeout="2m", input_validator=MessageInput
)
async def sectioning_task(input: MessageInput, ctx: DurableContext) -> dict:
    """Run generation and a safety check in parallel; block unsafe content."""
    content_result, safety_result = await asyncio.gather(
        content_wf.aio_run(input=MessageInput(message=input.message)),
        safety_wf.aio_run(input=MessageInput(message=input.message)),
    )

    # FIX: workflow run results are keyed by task name; the previous code
    # indexed the task output fields directly on the run result.
    safety = safety_result["safety_check"]
    if not safety["safe"]:
        return {"blocked": True, "reason": safety["reason"]}
    return {"blocked": False, "content": content_result["generate_content"]["content"]}
# !!


# > Step 03 Voting
@hatchet.durable_task(
    name="ParallelVoting", execution_timeout="3m", input_validator=ContentInput
)
async def voting_task(input: ContentInput, ctx: DurableContext) -> dict:
    """Run three evaluators in parallel and approve on majority vote."""
    runs = await asyncio.gather(
        evaluator_wf.aio_run(input=ContentInput(content=input.content)),
        evaluator_wf.aio_run(input=ContentInput(content=input.content)),
        evaluator_wf.aio_run(input=ContentInput(content=input.content)),
    )
    # FIX: workflow run results are keyed by task name (see sectioning_task).
    votes = [run["evaluate_content"] for run in runs]

    approvals = sum(1 for v in votes if v["approved"])
    avg_score = sum(v["score"] for v in votes) / len(votes)

    return {"approved": approvals >= 2, "average_score": avg_score, "votes": len(votes)}
# !!


def main() -> None:
    # > Step 04 Run Worker
    worker = hatchet.worker(
        "parallelization-worker",
        workflows=[content_wf, safety_wf, evaluator_wf, sectioning_task, voting_task],
        slots=10,
    )
    worker.start()
    # !!


if __name__ == "__main__":
    main()


# ==== sdks/guides/python/poetry.lock (generated - do not edit by hand) ====
# This file is automatically @generated by Poetry 2.2.1 and should not be changed by hand.
+ +[[package]] +name = "aiohappyeyeballs" +version = "2.6.1" +description = "Happy Eyeballs for asyncio" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "aiohappyeyeballs-2.6.1-py3-none-any.whl", hash = "sha256:f349ba8f4b75cb25c99c5c2d84e997e485204d2902a9597802b0371f09331fb8"}, + {file = "aiohappyeyeballs-2.6.1.tar.gz", hash = "sha256:c3f9d0113123803ccadfdf3f0faa505bc78e6a72d1cc4806cbd719826e943558"}, +] + +[[package]] +name = "aiohttp" +version = "3.13.3" +description = "Async http client/server framework (asyncio)" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "aiohttp-3.13.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d5a372fd5afd301b3a89582817fdcdb6c34124787c70dbcc616f259013e7eef7"}, + {file = "aiohttp-3.13.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:147e422fd1223005c22b4fe080f5d93ced44460f5f9c105406b753612b587821"}, + {file = "aiohttp-3.13.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:859bd3f2156e81dd01432f5849fc73e2243d4a487c4fd26609b1299534ee1845"}, + {file = "aiohttp-3.13.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:dca68018bf48c251ba17c72ed479f4dafe9dbd5a73707ad8d28a38d11f3d42af"}, + {file = "aiohttp-3.13.3-cp310-cp310-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:fee0c6bc7db1de362252affec009707a17478a00ec69f797d23ca256e36d5940"}, + {file = "aiohttp-3.13.3-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c048058117fd649334d81b4b526e94bde3ccaddb20463a815ced6ecbb7d11160"}, + {file = "aiohttp-3.13.3-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:215a685b6fbbfcf71dfe96e3eba7a6f58f10da1dfdf4889c7dd856abe430dca7"}, + {file = "aiohttp-3.13.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:de2c184bb1fe2cbd2cefba613e9db29a5ab559323f994b6737e370d3da0ac455"}, + {file = "aiohttp-3.13.3-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:75ca857eba4e20ce9f546cd59c7007b33906a4cd48f2ff6ccf1ccfc3b646f279"}, + {file = "aiohttp-3.13.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:81e97251d9298386c2b7dbeb490d3d1badbdc69107fb8c9299dd04eb39bddc0e"}, + {file = "aiohttp-3.13.3-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:c0e2d366af265797506f0283487223146af57815b388623f0357ef7eac9b209d"}, + {file = "aiohttp-3.13.3-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:4e239d501f73d6db1522599e14b9b321a7e3b1de66ce33d53a765d975e9f4808"}, + {file = "aiohttp-3.13.3-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:0db318f7a6f065d84cb1e02662c526294450b314a02bd9e2a8e67f0d8564ce40"}, + {file = "aiohttp-3.13.3-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:bfc1cc2fe31a6026a8a88e4ecfb98d7f6b1fec150cfd708adbfd1d2f42257c29"}, + {file = "aiohttp-3.13.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:af71fff7bac6bb7508956696dce8f6eec2bbb045eceb40343944b1ae62b5ef11"}, + {file = "aiohttp-3.13.3-cp310-cp310-win32.whl", hash = "sha256:37da61e244d1749798c151421602884db5270faf479cf0ef03af0ff68954c9dd"}, + {file = "aiohttp-3.13.3-cp310-cp310-win_amd64.whl", hash = "sha256:7e63f210bc1b57ef699035f2b4b6d9ce096b5914414a49b0997c839b2bd2223c"}, + {file = "aiohttp-3.13.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5b6073099fb654e0a068ae678b10feff95c5cae95bbfcbfa7af669d361a8aa6b"}, + {file = "aiohttp-3.13.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1cb93e166e6c28716c8c6aeb5f99dfb6d5ccf482d29fe9bf9a794110e6d0ab64"}, + {file = "aiohttp-3.13.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:28e027cf2f6b641693a09f631759b4d9ce9165099d2b5d92af9bd4e197690eea"}, + {file = "aiohttp-3.13.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:3b61b7169ababd7802f9568ed96142616a9118dd2be0d1866e920e77ec8fa92a"}, + {file = "aiohttp-3.13.3-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:80dd4c21b0f6237676449c6baaa1039abae86b91636b6c91a7f8e61c87f89540"}, + {file = "aiohttp-3.13.3-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:65d2ccb7eabee90ce0503c17716fc77226be026dcc3e65cce859a30db715025b"}, + {file = "aiohttp-3.13.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5b179331a481cb5529fca8b432d8d3c7001cb217513c94cd72d668d1248688a3"}, + {file = "aiohttp-3.13.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9d4c940f02f49483b18b079d1c27ab948721852b281f8b015c058100e9421dd1"}, + {file = "aiohttp-3.13.3-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f9444f105664c4ce47a2a7171a2418bce5b7bae45fb610f4e2c36045d85911d3"}, + {file = "aiohttp-3.13.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:694976222c711d1d00ba131904beb60534f93966562f64440d0c9d41b8cdb440"}, + {file = "aiohttp-3.13.3-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:f33ed1a2bf1997a36661874b017f5c4b760f41266341af36febaf271d179f6d7"}, + {file = "aiohttp-3.13.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:e636b3c5f61da31a92bf0d91da83e58fdfa96f178ba682f11d24f31944cdd28c"}, + {file = "aiohttp-3.13.3-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:5d2d94f1f5fcbe40838ac51a6ab5704a6f9ea42e72ceda48de5e6b898521da51"}, + {file = "aiohttp-3.13.3-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:2be0e9ccf23e8a94f6f0650ce06042cefc6ac703d0d7ab6c7a917289f2539ad4"}, + {file = "aiohttp-3.13.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9af5e68ee47d6534d36791bbe9b646d2a7c7deb6fc24d7943628edfbb3581f29"}, + {file = "aiohttp-3.13.3-cp311-cp311-win32.whl", hash = 
"sha256:a2212ad43c0833a873d0fb3c63fa1bacedd4cf6af2fee62bf4b739ceec3ab239"}, + {file = "aiohttp-3.13.3-cp311-cp311-win_amd64.whl", hash = "sha256:642f752c3eb117b105acbd87e2c143de710987e09860d674e068c4c2c441034f"}, + {file = "aiohttp-3.13.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:b903a4dfee7d347e2d87697d0713be59e0b87925be030c9178c5faa58ea58d5c"}, + {file = "aiohttp-3.13.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:a45530014d7a1e09f4a55f4f43097ba0fd155089372e105e4bff4ca76cb1b168"}, + {file = "aiohttp-3.13.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:27234ef6d85c914f9efeb77ff616dbf4ad2380be0cda40b4db086ffc7ddd1b7d"}, + {file = "aiohttp-3.13.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d32764c6c9aafb7fb55366a224756387cd50bfa720f32b88e0e6fa45b27dcf29"}, + {file = "aiohttp-3.13.3-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:b1a6102b4d3ebc07dad44fbf07b45bb600300f15b552ddf1851b5390202ea2e3"}, + {file = "aiohttp-3.13.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c014c7ea7fb775dd015b2d3137378b7be0249a448a1612268b5a90c2d81de04d"}, + {file = "aiohttp-3.13.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2b8d8ddba8f95ba17582226f80e2de99c7a7948e66490ef8d947e272a93e9463"}, + {file = "aiohttp-3.13.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9ae8dd55c8e6c4257eae3a20fd2c8f41edaea5992ed67156642493b8daf3cecc"}, + {file = "aiohttp-3.13.3-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:01ad2529d4b5035578f5081606a465f3b814c542882804e2e8cda61adf5c71bf"}, + {file = "aiohttp-3.13.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:bb4f7475e359992b580559e008c598091c45b5088f28614e855e42d39c2f1033"}, + {file = 
"aiohttp-3.13.3-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:c19b90316ad3b24c69cd78d5c9b4f3aa4497643685901185b65166293d36a00f"}, + {file = "aiohttp-3.13.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:96d604498a7c782cb15a51c406acaea70d8c027ee6b90c569baa6e7b93073679"}, + {file = "aiohttp-3.13.3-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:084911a532763e9d3dd95adf78a78f4096cd5f58cdc18e6fdbc1b58417a45423"}, + {file = "aiohttp-3.13.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:7a4a94eb787e606d0a09404b9c38c113d3b099d508021faa615d70a0131907ce"}, + {file = "aiohttp-3.13.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:87797e645d9d8e222e04160ee32aa06bc5c163e8499f24db719e7852ec23093a"}, + {file = "aiohttp-3.13.3-cp312-cp312-win32.whl", hash = "sha256:b04be762396457bef43f3597c991e192ee7da460a4953d7e647ee4b1c28e7046"}, + {file = "aiohttp-3.13.3-cp312-cp312-win_amd64.whl", hash = "sha256:e3531d63d3bdfa7e3ac5e9b27b2dd7ec9df3206a98e0b3445fa906f233264c57"}, + {file = "aiohttp-3.13.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:5dff64413671b0d3e7d5918ea490bdccb97a4ad29b3f311ed423200b2203e01c"}, + {file = "aiohttp-3.13.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:87b9aab6d6ed88235aa2970294f496ff1a1f9adcd724d800e9b952395a80ffd9"}, + {file = "aiohttp-3.13.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:425c126c0dc43861e22cb1c14ba4c8e45d09516d0a3ae0a3f7494b79f5f233a3"}, + {file = "aiohttp-3.13.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7f9120f7093c2a32d9647abcaf21e6ad275b4fbec5b55969f978b1a97c7c86bf"}, + {file = "aiohttp-3.13.3-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:697753042d57f4bf7122cab985bf15d0cef23c770864580f5af4f52023a56bd6"}, + {file = "aiohttp-3.13.3-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = 
"sha256:6de499a1a44e7de70735d0b39f67c8f25eb3d91eb3103be99ca0fa882cdd987d"}, + {file = "aiohttp-3.13.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:37239e9f9a7ea9ac5bf6b92b0260b01f8a22281996da609206a84df860bc1261"}, + {file = "aiohttp-3.13.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f76c1e3fe7d7c8afad7ed193f89a292e1999608170dcc9751a7462a87dfd5bc0"}, + {file = "aiohttp-3.13.3-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:fc290605db2a917f6e81b0e1e0796469871f5af381ce15c604a3c5c7e51cb730"}, + {file = "aiohttp-3.13.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4021b51936308aeea0367b8f006dc999ca02bc118a0cc78c303f50a2ff6afb91"}, + {file = "aiohttp-3.13.3-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:49a03727c1bba9a97d3e93c9f93ca03a57300f484b6e935463099841261195d3"}, + {file = "aiohttp-3.13.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:3d9908a48eb7416dc1f4524e69f1d32e5d90e3981e4e37eb0aa1cd18f9cfa2a4"}, + {file = "aiohttp-3.13.3-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:2712039939ec963c237286113c68dbad80a82a4281543f3abf766d9d73228998"}, + {file = "aiohttp-3.13.3-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:7bfdc049127717581866fa4708791220970ce291c23e28ccf3922c700740fdc0"}, + {file = "aiohttp-3.13.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8057c98e0c8472d8846b9c79f56766bcc57e3e8ac7bfd510482332366c56c591"}, + {file = "aiohttp-3.13.3-cp313-cp313-win32.whl", hash = "sha256:1449ceddcdbcf2e0446957863af03ebaaa03f94c090f945411b61269e2cb5daf"}, + {file = "aiohttp-3.13.3-cp313-cp313-win_amd64.whl", hash = "sha256:693781c45a4033d31d4187d2436f5ac701e7bbfe5df40d917736108c1cc7436e"}, + {file = "aiohttp-3.13.3-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:ea37047c6b367fd4bd632bff8077449b8fa034b69e812a18e0132a00fae6e808"}, + {file = 
"aiohttp-3.13.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:6fc0e2337d1a4c3e6acafda6a78a39d4c14caea625124817420abceed36e2415"}, + {file = "aiohttp-3.13.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c685f2d80bb67ca8c3837823ad76196b3694b0159d232206d1e461d3d434666f"}, + {file = "aiohttp-3.13.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:48e377758516d262bde50c2584fc6c578af272559c409eecbdd2bae1601184d6"}, + {file = "aiohttp-3.13.3-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:34749271508078b261c4abb1767d42b8d0c0cc9449c73a4df494777dc55f0687"}, + {file = "aiohttp-3.13.3-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:82611aeec80eb144416956ec85b6ca45a64d76429c1ed46ae1b5f86c6e0c9a26"}, + {file = "aiohttp-3.13.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2fff83cfc93f18f215896e3a190e8e5cb413ce01553901aca925176e7568963a"}, + {file = "aiohttp-3.13.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bbe7d4cecacb439e2e2a8a1a7b935c25b812af7a5fd26503a66dadf428e79ec1"}, + {file = "aiohttp-3.13.3-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:b928f30fe49574253644b1ca44b1b8adbd903aa0da4b9054a6c20fc7f4092a25"}, + {file = "aiohttp-3.13.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7b5e8fe4de30df199155baaf64f2fcd604f4c678ed20910db8e2c66dc4b11603"}, + {file = "aiohttp-3.13.3-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:8542f41a62bcc58fc7f11cf7c90e0ec324ce44950003feb70640fc2a9092c32a"}, + {file = "aiohttp-3.13.3-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:5e1d8c8b8f1d91cd08d8f4a3c2b067bfca6ec043d3ff36de0f3a715feeedf926"}, + {file = "aiohttp-3.13.3-cp314-cp314-musllinux_1_2_riscv64.whl", hash = 
"sha256:90455115e5da1c3c51ab619ac57f877da8fd6d73c05aacd125c5ae9819582aba"}, + {file = "aiohttp-3.13.3-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:042e9e0bcb5fba81886c8b4fbb9a09d6b8a00245fd8d88e4d989c1f96c74164c"}, + {file = "aiohttp-3.13.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:2eb752b102b12a76ca02dff751a801f028b4ffbbc478840b473597fc91a9ed43"}, + {file = "aiohttp-3.13.3-cp314-cp314-win32.whl", hash = "sha256:b556c85915d8efaed322bf1bdae9486aa0f3f764195a0fb6ee962e5c71ef5ce1"}, + {file = "aiohttp-3.13.3-cp314-cp314-win_amd64.whl", hash = "sha256:9bf9f7a65e7aa20dd764151fb3d616c81088f91f8df39c3893a536e279b4b984"}, + {file = "aiohttp-3.13.3-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:05861afbbec40650d8a07ea324367cb93e9e8cc7762e04dd4405df99fa65159c"}, + {file = "aiohttp-3.13.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:2fc82186fadc4a8316768d61f3722c230e2c1dcab4200d52d2ebdf2482e47592"}, + {file = "aiohttp-3.13.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:0add0900ff220d1d5c5ebbf99ed88b0c1bbf87aa7e4262300ed1376a6b13414f"}, + {file = "aiohttp-3.13.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:568f416a4072fbfae453dcf9a99194bbb8bdeab718e08ee13dfa2ba0e4bebf29"}, + {file = "aiohttp-3.13.3-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:add1da70de90a2569c5e15249ff76a631ccacfe198375eead4aadf3b8dc849dc"}, + {file = "aiohttp-3.13.3-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:10b47b7ba335d2e9b1239fa571131a87e2d8ec96b333e68b2a305e7a98b0bae2"}, + {file = "aiohttp-3.13.3-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:3dd4dce1c718e38081c8f35f323209d4c1df7d4db4bab1b5c88a6b4d12b74587"}, + {file = "aiohttp-3.13.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:34bac00a67a812570d4a460447e1e9e06fae622946955f939051e7cc895cfab8"}, + {file = "aiohttp-3.13.3-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a19884d2ee70b06d9204b2727a7b9f983d0c684c650254679e716b0b77920632"}, + {file = "aiohttp-3.13.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:5f8ca7f2bb6ba8348a3614c7918cc4bb73268c5ac2a207576b7afea19d3d9f64"}, + {file = "aiohttp-3.13.3-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:b0d95340658b9d2f11d9697f59b3814a9d3bb4b7a7c20b131df4bcef464037c0"}, + {file = "aiohttp-3.13.3-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:a1e53262fd202e4b40b70c3aff944a8155059beedc8a89bba9dc1f9ef06a1b56"}, + {file = "aiohttp-3.13.3-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:d60ac9663f44168038586cab2157e122e46bdef09e9368b37f2d82d354c23f72"}, + {file = "aiohttp-3.13.3-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:90751b8eed69435bac9ff4e3d2f6b3af1f57e37ecb0fbeee59c0174c9e2d41df"}, + {file = "aiohttp-3.13.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:fc353029f176fd2b3ec6cfc71be166aba1936fe5d73dd1992ce289ca6647a9aa"}, + {file = "aiohttp-3.13.3-cp314-cp314t-win32.whl", hash = "sha256:2e41b18a58da1e474a057b3d35248d8320029f61d70a37629535b16a0c8f3767"}, + {file = "aiohttp-3.13.3-cp314-cp314t-win_amd64.whl", hash = "sha256:44531a36aa2264a1860089ffd4dce7baf875ee5a6079d5fb42e261c704ef7344"}, + {file = "aiohttp-3.13.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:31a83ea4aead760dfcb6962efb1d861db48c34379f2ff72db9ddddd4cda9ea2e"}, + {file = "aiohttp-3.13.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:988a8c5e317544fdf0d39871559e67b6341065b87fceac641108c2096d5506b7"}, + {file = "aiohttp-3.13.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9b174f267b5cfb9a7dba9ee6859cecd234e9a681841eb85068059bc867fb8f02"}, + {file = "aiohttp-3.13.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:947c26539750deeaee933b000fb6517cc770bbd064bad6033f1cff4803881e43"}, + {file = "aiohttp-3.13.3-cp39-cp39-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:9ebf57d09e131f5323464bd347135a88622d1c0976e88ce15b670e7ad57e4bd6"}, + {file = "aiohttp-3.13.3-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:4ae5b5a0e1926e504c81c5b84353e7a5516d8778fbbff00429fe7b05bb25cbce"}, + {file = "aiohttp-3.13.3-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2ba0eea45eb5cc3172dbfc497c066f19c41bac70963ea1a67d51fc92e4cf9a80"}, + {file = "aiohttp-3.13.3-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bae5c2ed2eae26cc382020edad80d01f36cb8e746da40b292e68fec40421dc6a"}, + {file = "aiohttp-3.13.3-cp39-cp39-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:8a60e60746623925eab7d25823329941aee7242d559baa119ca2b253c88a7bd6"}, + {file = "aiohttp-3.13.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:e50a2e1404f063427c9d027378472316201a2290959a295169bcf25992d04558"}, + {file = "aiohttp-3.13.3-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:9a9dc347e5a3dc7dfdbc1f82da0ef29e388ddb2ed281bfce9dd8248a313e62b7"}, + {file = "aiohttp-3.13.3-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:b46020d11d23fe16551466c77823df9cc2f2c1e63cc965daf67fa5eec6ca1877"}, + {file = "aiohttp-3.13.3-cp39-cp39-musllinux_1_2_riscv64.whl", hash = "sha256:69c56fbc1993fa17043e24a546959c0178fe2b5782405ad4559e6c13975c15e3"}, + {file = "aiohttp-3.13.3-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:b99281b0704c103d4e11e72a76f1b543d4946fea7dd10767e7e1b5f00d4e5704"}, + {file = "aiohttp-3.13.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:40c5e40ecc29ba010656c18052b877a1c28f84344825efa106705e835c28530f"}, + {file = "aiohttp-3.13.3-cp39-cp39-win32.whl", hash = 
"sha256:56339a36b9f1fc708260c76c87e593e2afb30d26de9ae1eb445b5e051b98a7a1"}, + {file = "aiohttp-3.13.3-cp39-cp39-win_amd64.whl", hash = "sha256:c6b8568a3bb5819a0ad087f16d40e5a3fb6099f39ea1d5625a3edc1e923fc538"}, + {file = "aiohttp-3.13.3.tar.gz", hash = "sha256:a949eee43d3782f2daae4f4a2819b2cb9b0c5d3b7f7a927067cc84dafdbb9f88"}, +] + +[package.dependencies] +aiohappyeyeballs = ">=2.5.0" +aiosignal = ">=1.4.0" +async-timeout = {version = ">=4.0,<6.0", markers = "python_version < \"3.11\""} +attrs = ">=17.3.0" +frozenlist = ">=1.1.1" +multidict = ">=4.5,<7.0" +propcache = ">=0.2.0" +yarl = ">=1.17.0,<2.0" + +[package.extras] +speedups = ["Brotli (>=1.2) ; platform_python_implementation == \"CPython\"", "aiodns (>=3.3.0)", "backports.zstd ; platform_python_implementation == \"CPython\" and python_version < \"3.14\"", "brotlicffi (>=1.2) ; platform_python_implementation != \"CPython\""] + +[[package]] +name = "aiosignal" +version = "1.4.0" +description = "aiosignal: a list of registered asynchronous callbacks" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "aiosignal-1.4.0-py3-none-any.whl", hash = "sha256:053243f8b92b990551949e63930a839ff0cf0b0ebbe0597b0f3fb19e1a0fe82e"}, + {file = "aiosignal-1.4.0.tar.gz", hash = "sha256:f47eecd9468083c2029cc99945502cb7708b082c232f9aca65da147157b251c7"}, +] + +[package.dependencies] +frozenlist = ">=1.1.0" +typing-extensions = {version = ">=4.2", markers = "python_version < \"3.13\""} + +[[package]] +name = "annotated-doc" +version = "0.0.4" +description = "Document parameters, class attributes, return types, and variables inline, with Annotated." 
+optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "annotated_doc-0.0.4-py3-none-any.whl", hash = "sha256:571ac1dc6991c450b25a9c2d84a3705e2ae7a53467b5d111c24fa8baabbed320"}, + {file = "annotated_doc-0.0.4.tar.gz", hash = "sha256:fbcda96e87e9c92ad167c2e53839e57503ecfda18804ea28102353485033faa4"}, +] + +[[package]] +name = "annotated-types" +version = "0.7.0" +description = "Reusable constraint types to use with typing.Annotated" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, + {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, +] + +[[package]] +name = "anthropic" +version = "0.39.0" +description = "The official Python library for the anthropic API" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "anthropic-0.39.0-py3-none-any.whl", hash = "sha256:ea17093ae0ce0e1768b0c46501d6086b5bcd74ff39d68cd2d6396374e9de7c09"}, + {file = "anthropic-0.39.0.tar.gz", hash = "sha256:94671cc80765f9ce693f76d63a97ee9bef4c2d6063c044e983d21a2e262f63ba"}, +] + +[package.dependencies] +anyio = ">=3.5.0,<5" +distro = ">=1.7.0,<2" +httpx = ">=0.23.0,<1" +jiter = ">=0.4.0,<1" +pydantic = ">=1.9.0,<3" +sniffio = "*" +typing-extensions = ">=4.7,<5" + +[package.extras] +bedrock = ["boto3 (>=1.28.57)", "botocore (>=1.31.57)"] +vertex = ["google-auth (>=2,<3)"] + +[[package]] +name = "anyio" +version = "4.12.1" +description = "High-level concurrency and networking framework on top of asyncio or Trio" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "anyio-4.12.1-py3-none-any.whl", hash = "sha256:d405828884fc140aa80a3c667b8beed277f1dfedec42ba031bd6ac3db606ab6c"}, + {file = "anyio-4.12.1.tar.gz", hash = 
"sha256:41cfcc3a4c85d3f05c932da7c26d0201ac36f72abd4435ba90d0464a3ffed703"}, +] + +[package.dependencies] +exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} +idna = ">=2.8" +typing_extensions = {version = ">=4.5", markers = "python_version < \"3.13\""} + +[package.extras] +trio = ["trio (>=0.31.0) ; python_version < \"3.10\"", "trio (>=0.32.0) ; python_version >= \"3.10\""] + +[[package]] +name = "async-timeout" +version = "5.0.1" +description = "Timeout context manager for asyncio programs" +optional = false +python-versions = ">=3.8" +groups = ["main"] +markers = "python_version == \"3.10\"" +files = [ + {file = "async_timeout-5.0.1-py3-none-any.whl", hash = "sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c"}, + {file = "async_timeout-5.0.1.tar.gz", hash = "sha256:d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3"}, +] + +[[package]] +name = "attrs" +version = "25.4.0" +description = "Classes Without Boilerplate" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "attrs-25.4.0-py3-none-any.whl", hash = "sha256:adcf7e2a1fb3b36ac48d97835bb6d8ade15b8dcce26aba8bf1d14847b57a3373"}, + {file = "attrs-25.4.0.tar.gz", hash = "sha256:16d5969b87f0859ef33a48b35d55ac1be6e42ae49d5e853b597db70c35c57e11"}, +] + +[[package]] +name = "browserbase" +version = "1.4.0" +description = "The official Python library for the Browserbase API" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "browserbase-1.4.0-py3-none-any.whl", hash = "sha256:ea9f1fb4a88921975b8b9606835c441a59d8ce82ce00313a6d48bbe8e30f79fb"}, + {file = "browserbase-1.4.0.tar.gz", hash = "sha256:e2ed36f513c8630b94b826042c4bb9f497c333f3bd28e5b76cb708c65b4318a0"}, +] + +[package.dependencies] +anyio = ">=3.5.0,<5" +distro = ">=1.7.0,<2" +httpx = ">=0.23.0,<1" +pydantic = ">=1.9.0,<3" +sniffio = "*" +typing-extensions = ">=4.10,<5" + +[[package]] +name = "certifi" +version = "2026.2.25" 
+description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "certifi-2026.2.25-py3-none-any.whl", hash = "sha256:027692e4402ad994f1c42e52a4997a9763c646b73e4096e4d5d6db8af1d6f0fa"}, + {file = "certifi-2026.2.25.tar.gz", hash = "sha256:e887ab5cee78ea814d3472169153c2d12cd43b14bd03329a39a9c6e2e80bfba7"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.4.4" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "charset_normalizer-3.4.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e824f1492727fa856dd6eda4f7cee25f8518a12f3c4a56a74e8095695089cf6d"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4bd5d4137d500351a30687c2d3971758aac9a19208fc110ccb9d7188fbe709e8"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:027f6de494925c0ab2a55eab46ae5129951638a49a34d87f4c3eda90f696b4ad"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f820802628d2694cb7e56db99213f930856014862f3fd943d290ea8438d07ca8"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:798d75d81754988d2565bff1b97ba5a44411867c0cf32b77a7e8f8d84796b10d"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9d1bb833febdff5c8927f922386db610b49db6e0d4f4ee29601d71e7c2694313"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:9cd98cdc06614a2f768d2b7286d66805f94c48cde050acdbbb7db2600ab3197e"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:077fbb858e903c73f6c9db43374fd213b0b6a778106bc7032446a8e8b5b38b93"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:244bfb999c71b35de57821b8ea746b24e863398194a4014e4c76adc2bbdfeff0"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:64b55f9dce520635f018f907ff1b0df1fdc31f2795a922fb49dd14fbcdf48c84"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:faa3a41b2b66b6e50f84ae4a68c64fcd0c44355741c6374813a800cd6695db9e"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:6515f3182dbe4ea06ced2d9e8666d97b46ef4c75e326b79bb624110f122551db"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:cc00f04ed596e9dc0da42ed17ac5e596c6ccba999ba6bd92b0e0aef2f170f2d6"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-win32.whl", hash = "sha256:f34be2938726fc13801220747472850852fe6b1ea75869a048d6f896838c896f"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-win_amd64.whl", hash = "sha256:a61900df84c667873b292c3de315a786dd8dac506704dea57bc957bd31e22c7d"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-win_arm64.whl", hash = "sha256:cead0978fc57397645f12578bfd2d5ea9138ea0fac82b2f63f7f7c6877986a69"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6e1fcf0720908f200cd21aa4e6750a48ff6ce4afe7ff5a79a90d5ed8a08296f8"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5f819d5fe9234f9f82d75bdfa9aef3a3d72c4d24a6e57aeaebba32a704553aa0"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = 
"sha256:a59cb51917aa591b1c4e6a43c132f0cdc3c76dbad6155df4e28ee626cc77a0a3"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:8ef3c867360f88ac904fd3f5e1f902f13307af9052646963ee08ff4f131adafc"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d9e45d7faa48ee908174d8fe84854479ef838fc6a705c9315372eacbc2f02897"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:840c25fb618a231545cbab0564a799f101b63b9901f2569faecd6b222ac72381"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ca5862d5b3928c4940729dacc329aa9102900382fea192fc5e52eb69d6093815"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d9c7f57c3d666a53421049053eaacdd14bbd0a528e2186fcb2e672effd053bb0"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:277e970e750505ed74c832b4bf75dac7476262ee2a013f5574dd49075879e161"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:31fd66405eaf47bb62e8cd575dc621c56c668f27d46a61d975a249930dd5e2a4"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:0d3d8f15c07f86e9ff82319b3d9ef6f4bf907608f53fe9d92b28ea9ae3d1fd89"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:9f7fcd74d410a36883701fafa2482a6af2ff5ba96b9a620e9e0721e28ead5569"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ebf3e58c7ec8a8bed6d66a75d7fb37b55e5015b03ceae72a8e7c74495551e224"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-win32.whl", hash = "sha256:eecbc200c7fd5ddb9a7f16c7decb07b566c29fa2161a16cf67b8d068bd21690a"}, + {file = 
"charset_normalizer-3.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:5ae497466c7901d54b639cf42d5b8c1b6a4fead55215500d2f486d34db48d016"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-win_arm64.whl", hash = "sha256:65e2befcd84bc6f37095f5961e68a6f077bf44946771354a28ad434c2cce0ae1"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0a98e6759f854bd25a58a73fa88833fba3b7c491169f86ce1180c948ab3fd394"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b5b290ccc2a263e8d185130284f8501e3e36c5e02750fc6b6bdeb2e9e96f1e25"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74bb723680f9f7a6234dcf67aea57e708ec1fbdf5699fb91dfd6f511b0a320ef"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f1e34719c6ed0b92f418c7c780480b26b5d9c50349e9a9af7d76bf757530350d"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2437418e20515acec67d86e12bf70056a33abdacb5cb1655042f6538d6b085a8"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:11d694519d7f29d6cd09f6ac70028dba10f92f6cdd059096db198c283794ac86"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ac1c4a689edcc530fc9d9aa11f5774b9e2f33f9a0c6a57864e90908f5208d30a"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:21d142cc6c0ec30d2efee5068ca36c128a30b0f2c53c1c07bd78cb6bc1d3be5f"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:5dbe56a36425d26d6cfb40ce79c314a2e4dd6211d51d6d2191c00bed34f354cc"}, + {file = 
"charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5bfbb1b9acf3334612667b61bd3002196fe2a1eb4dd74d247e0f2a4d50ec9bbf"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:d055ec1e26e441f6187acf818b73564e6e6282709e9bcb5b63f5b23068356a15"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:af2d8c67d8e573d6de5bc30cdb27e9b95e49115cd9baad5ddbd1a6207aaa82a9"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:780236ac706e66881f3b7f2f32dfe90507a09e67d1d454c762cf642e6e1586e0"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-win32.whl", hash = "sha256:5833d2c39d8896e4e19b689ffc198f08ea58116bee26dea51e362ecc7cd3ed26"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:a79cfe37875f822425b89a82333404539ae63dbdddf97f84dcbc3d339aae9525"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-win_arm64.whl", hash = "sha256:376bec83a63b8021bb5c8ea75e21c4ccb86e7e45ca4eb81146091b56599b80c3"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:e1f185f86a6f3403aa2420e815904c67b2f9ebc443f045edd0de921108345794"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b39f987ae8ccdf0d2642338faf2abb1862340facc796048b604ef14919e55ed"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3162d5d8ce1bb98dd51af660f2121c55d0fa541b46dff7bb9b9f86ea1d87de72"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:81d5eb2a312700f4ecaa977a8235b634ce853200e828fbadf3a9c50bab278328"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:5bd2293095d766545ec1a8f612559f6b40abc0eb18bb2f5d1171872d34036ede"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a8a8b89589086a25749f471e6a900d3f662d1d3b6e2e59dcecf787b1cc3a1894"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc7637e2f80d8530ee4a78e878bce464f70087ce73cf7c1caf142416923b98f1"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f8bf04158c6b607d747e93949aa60618b61312fe647a6369f88ce2ff16043490"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:554af85e960429cf30784dd47447d5125aaa3b99a6f0683589dbd27e2f45da44"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:74018750915ee7ad843a774364e13a3db91682f26142baddf775342c3f5b1133"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:c0463276121fdee9c49b98908b3a89c39be45d86d1dbaa22957e38f6321d4ce3"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:362d61fd13843997c1c446760ef36f240cf81d3ebf74ac62652aebaf7838561e"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9a26f18905b8dd5d685d6d07b0cdf98a79f3c7a918906af7cc143ea2e164c8bc"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-win32.whl", hash = "sha256:9b35f4c90079ff2e2edc5b26c0c77925e5d2d255c42c74fdb70fb49b172726ac"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-win_amd64.whl", hash = "sha256:b435cba5f4f750aa6c0a0d92c541fb79f69a387c91e61f1795227e4ed9cece14"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-win_arm64.whl", hash = "sha256:542d2cee80be6f80247095cc36c418f7bddd14f4a6de45af91dfad36d817bba2"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-macosx_10_13_universal2.whl", hash = 
"sha256:da3326d9e65ef63a817ecbcc0df6e94463713b754fe293eaa03da99befb9a5bd"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8af65f14dc14a79b924524b1e7fffe304517b2bff5a58bf64f30b98bbc5079eb"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74664978bb272435107de04e36db5a9735e78232b85b77d45cfb38f758efd33e"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:752944c7ffbfdd10c074dc58ec2d5a8a4cd9493b314d367c14d24c17684ddd14"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d1f13550535ad8cff21b8d757a3257963e951d96e20ec82ab44bc64aeb62a191"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ecaae4149d99b1c9e7b88bb03e3221956f68fd6d50be2ef061b2381b61d20838"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:cb6254dc36b47a990e59e1068afacdcd02958bdcce30bb50cc1700a8b9d624a6"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:c8ae8a0f02f57a6e61203a31428fa1d677cbe50c93622b4149d5c0f319c1d19e"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:47cc91b2f4dd2833fddaedd2893006b0106129d4b94fdb6af1f4ce5a9965577c"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:82004af6c302b5d3ab2cfc4cc5f29db16123b1a8417f2e25f9066f91d4411090"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:2b7d8f6c26245217bd2ad053761201e9f9680f8ce52f0fcd8d0755aeae5b2152"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_s390x.whl", hash = 
"sha256:799a7a5e4fb2d5898c60b640fd4981d6a25f1c11790935a44ce38c54e985f828"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:99ae2cffebb06e6c22bdc25801d7b30f503cc87dbd283479e7b606f70aff57ec"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-win32.whl", hash = "sha256:f9d332f8c2a2fcbffe1378594431458ddbef721c1769d78e2cbc06280d8155f9"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-win_amd64.whl", hash = "sha256:8a6562c3700cce886c5be75ade4a5db4214fda19fede41d9792d100288d8f94c"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-win_arm64.whl", hash = "sha256:de00632ca48df9daf77a2c65a484531649261ec9f25489917f09e455cb09ddb2"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ce8a0633f41a967713a59c4139d29110c07e826d131a316b50ce11b1d79b4f84"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:eaabd426fe94daf8fd157c32e571c85cb12e66692f15516a83a03264b08d06c3"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:c4ef880e27901b6cc782f1b95f82da9313c0eb95c3af699103088fa0ac3ce9ac"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2aaba3b0819274cc41757a1da876f810a3e4d7b6eb25699253a4effef9e8e4af"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:778d2e08eda00f4256d7f672ca9fef386071c9202f5e4607920b86d7803387f2"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f155a433c2ec037d4e8df17d18922c3a0d9b3232a396690f17175d2946f0218d"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:a8bf8d0f749c5757af2142fe7903a9df1d2e8aa3841559b2bad34b08d0e2bcf3"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:194f08cbb32dc406d6e1aea671a68be0823673db2832b38405deba2fb0d88f63"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_armv7l.whl", hash = "sha256:6aee717dcfead04c6eb1ce3bd29ac1e22663cdea57f943c87d1eab9a025438d7"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:cd4b7ca9984e5e7985c12bc60a6f173f3c958eae74f3ef6624bb6b26e2abbae4"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_riscv64.whl", hash = "sha256:b7cf1017d601aa35e6bb650b6ad28652c9cd78ee6caff19f3c28d03e1c80acbf"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:e912091979546adf63357d7e2ccff9b44f026c075aeaf25a52d0e95ad2281074"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:5cb4d72eea50c8868f5288b7f7f33ed276118325c1dfd3957089f6b519e1382a"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-win32.whl", hash = "sha256:837c2ce8c5a65a2035be9b3569c684358dfbf109fd3b6969630a87535495ceaa"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-win_amd64.whl", hash = "sha256:44c2a8734b333e0578090c4cd6b16f275e07aa6614ca8715e6c038e865e70576"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:a9768c477b9d7bd54bc0c86dbaebdec6f03306675526c9927c0e8a04e8f94af9"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1bee1e43c28aa63cb16e5c14e582580546b08e535299b8b6158a7c9c768a1f3d"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:fd44c878ea55ba351104cb93cc85e74916eb8fa440ca7903e57575e97394f608"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = 
"sha256:0f04b14ffe5fdc8c4933862d8306109a2c51e0704acfa35d51598eb45a1e89fc"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:cd09d08005f958f370f539f186d10aec3377d55b9eeb0d796025d4886119d76e"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4fe7859a4e3e8457458e2ff592f15ccb02f3da787fcd31e0183879c3ad4692a1"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:fa09f53c465e532f4d3db095e0c55b615f010ad81803d383195b6b5ca6cbf5f3"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:7fa17817dc5625de8a027cb8b26d9fefa3ea28c8253929b8d6649e705d2835b6"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:5947809c8a2417be3267efc979c47d76a079758166f7d43ef5ae8e9f92751f88"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:4902828217069c3c5c71094537a8e623f5d097858ac6ca8252f7b4d10b7560f1"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_riscv64.whl", hash = "sha256:7c308f7e26e4363d79df40ca5b2be1c6ba9f02bdbccfed5abddb7859a6ce72cf"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:2c9d3c380143a1fedbff95a312aa798578371eb29da42106a29019368a475318"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:cb01158d8b88ee68f15949894ccc6712278243d95f344770fa7593fa2d94410c"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-win32.whl", hash = "sha256:2677acec1a2f8ef614c6888b5b4ae4060cc184174a938ed4e8ef690e15d3e505"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-win_amd64.whl", hash = "sha256:f8e160feb2aed042cd657a72acc0b481212ed28b1b9a95c0cee1621b524e1966"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-win_arm64.whl", hash = 
"sha256:b5d84d37db046c5ca74ee7bb47dd6cbc13f80665fdde3e8040bdd3fb015ecb50"}, + {file = "charset_normalizer-3.4.4-py3-none-any.whl", hash = "sha256:7a32c560861a02ff789ad905a2fe94e3f840803362c84fecf1851cb4cf3dc37f"}, + {file = "charset_normalizer-3.4.4.tar.gz", hash = "sha256:94537985111c35f28720e43603b8e7b43a6ecfb2ce1d3058bbe955b73404e21a"}, +] + +[[package]] +name = "click" +version = "8.3.1" +description = "Composable command line interface toolkit" +optional = false +python-versions = ">=3.10" +groups = ["main"] +files = [ + {file = "click-8.3.1-py3-none-any.whl", hash = "sha256:981153a64e25f12d547d3426c367a4857371575ee7ad18df2a6183ab0545b2a6"}, + {file = "click-8.3.1.tar.gz", hash = "sha256:12ff4785d337a1bb490bb7e9c2b1ee5da3112e94a8622f26a6c77f5d2fc6842a"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "cohere" +version = "5.20.7" +description = "" +optional = false +python-versions = "<4.0,>=3.9" +groups = ["main"] +files = [ + {file = "cohere-5.20.7-py3-none-any.whl", hash = "sha256:043fef2a12c30c07e9b2c1f0b869fd66ffd911f58d1492f87e901c4190a65914"}, + {file = "cohere-5.20.7.tar.gz", hash = "sha256:997ed85fabb3a1e4a4c036fdb520382e7bfa670db48eb59a026803b6f7061dbb"}, +] + +[package.dependencies] +fastavro = ">=1.9.4,<2.0.0" +httpx = ">=0.21.2" +pydantic = ">=1.9.2" +pydantic-core = ">=2.18.2" +requests = ">=2.0.0,<3.0.0" +tokenizers = ">=0.15,<1" +types-requests = ">=2.0.0,<3.0.0" +typing_extensions = ">=4.0.0" + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." 
+optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +groups = ["main"] +markers = "platform_system == \"Windows\"" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "distro" +version = "1.9.0" +description = "Distro - an OS platform information API" +optional = false +python-versions = ">=3.6" +groups = ["main"] +files = [ + {file = "distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2"}, + {file = "distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed"}, +] + +[[package]] +name = "exceptiongroup" +version = "1.3.1" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +groups = ["main"] +markers = "python_version == \"3.10\"" +files = [ + {file = "exceptiongroup-1.3.1-py3-none-any.whl", hash = "sha256:a7a39a3bd276781e98394987d3a5701d0c4edffb633bb7a5144577f82c773598"}, + {file = "exceptiongroup-1.3.1.tar.gz", hash = "sha256:8b412432c6055b0b7d14c310000ae93352ed6754f70fa8f7c34141f91c4e3219"}, +] + +[package.dependencies] +typing-extensions = {version = ">=4.6.0", markers = "python_version < \"3.13\""} + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "fastavro" +version = "1.12.1" +description = "Fast read/write of AVRO files" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "fastavro-1.12.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:00650ca533907361edda22e6ffe8cf87ab2091c5d8aee5c8000b0f2dcdda7ed3"}, + {file = "fastavro-1.12.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:ac76d6d95f909c72ee70d314b460b7e711d928845771531d823eb96a10952d26"}, + {file = "fastavro-1.12.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1f55eef18c41d4476bd32a82ed5dd86aabc3f614e1b66bdb09ffa291612e1670"}, + {file = "fastavro-1.12.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:81563e1f93570e6565487cdb01ba241a36a00e58cff9c5a0614af819d1155d8f"}, + {file = "fastavro-1.12.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:bec207360f76f0b3de540758a297193c5390e8e081c43c3317f610b1414d8c8f"}, + {file = "fastavro-1.12.1-cp310-cp310-win_amd64.whl", hash = "sha256:c0390bfe4a9f8056a75ac6785fbbff8f5e317f5356481d2e29ec980877d2314b"}, + {file = "fastavro-1.12.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6b632b713bc5d03928a87d811fa4a11d5f25cd43e79c161e291c7d3f7aa740fd"}, + {file = "fastavro-1.12.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:eaa7ab3769beadcebb60f0539054c7755f63bd9cf7666e2c15e615ab605f89a8"}, + {file = "fastavro-1.12.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:123fb221df3164abd93f2d042c82f538a1d5a43ce41375f12c91ce1355a9141e"}, + {file = "fastavro-1.12.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:632a4e3ff223f834ddb746baae0cc7cee1068eb12c32e4d982c2fee8a5b483d0"}, + {file = "fastavro-1.12.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:83e6caf4e7a8717d932a3b1ff31595ad169289bbe1128a216be070d3a8391671"}, + {file = "fastavro-1.12.1-cp311-cp311-win_amd64.whl", hash = "sha256:b91a0fe5a173679a6c02d53ca22dcaad0a2c726b74507e0c1c2e71a7c3f79ef9"}, + {file = "fastavro-1.12.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:509818cb24b98a804fc80be9c5fed90f660310ae3d59382fc811bfa187122167"}, + {file = "fastavro-1.12.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:089e155c0c76e0d418d7e79144ce000524dd345eab3bc1e9c5ae69d500f71b14"}, + {file = "fastavro-1.12.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:44cbff7518901c91a82aab476fcab13d102e4999499df219d481b9e15f61af34"}, + {file = "fastavro-1.12.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a275e48df0b1701bb764b18a8a21900b24cf882263cb03d35ecdba636bbc830b"}, + {file = "fastavro-1.12.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2de72d786eb38be6b16d556b27232b1bf1b2797ea09599507938cdb7a9fe3e7c"}, + {file = "fastavro-1.12.1-cp312-cp312-win_amd64.whl", hash = "sha256:9090f0dee63fe022ee9cc5147483366cc4171c821644c22da020d6b48f576b4f"}, + {file = "fastavro-1.12.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:78df838351e4dff9edd10a1c41d1324131ffecbadefb9c297d612ef5363c049a"}, + {file = "fastavro-1.12.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:780476c23175d2ae457c52f45b9ffa9d504593499a36cd3c1929662bf5b7b14b"}, + {file = "fastavro-1.12.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0714b285160fcd515eb0455540f40dd6dac93bdeacdb03f24e8eac3d8aa51f8d"}, + {file = "fastavro-1.12.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a8bc2dcec5843d499f2489bfe0747999108f78c5b29295d877379f1972a3d41a"}, + {file = "fastavro-1.12.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:3b1921ac35f3d89090a5816b626cf46e67dbecf3f054131f84d56b4e70496f45"}, + {file = "fastavro-1.12.1-cp313-cp313-win_amd64.whl", hash = "sha256:5aa777b8ee595b50aa084104cd70670bf25a7bbb9fd8bb5d07524b0785ee1699"}, + {file = "fastavro-1.12.1-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:c3d67c47f177e486640404a56f2f50b165fe892cc343ac3a34673b80cc7f1dd6"}, + {file = "fastavro-1.12.1-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:5217f773492bac43dae15ff2931432bce2d7a80be7039685a78d3fab7df910bd"}, + {file = "fastavro-1.12.1-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:469fecb25cba07f2e1bfa4c8d008477cd6b5b34a59d48715e1b1a73f6160097d"}, + {file = "fastavro-1.12.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:d71c8aa841ef65cfab709a22bb887955f42934bced3ddb571e98fdbdade4c609"}, + {file = "fastavro-1.12.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:b81fc04e85dfccf7c028e0580c606e33aa8472370b767ef058aae2c674a90746"}, + {file = "fastavro-1.12.1-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:9445da127751ba65975d8e4bdabf36bfcfdad70fc35b2d988e3950cce0ec0e7c"}, + {file = "fastavro-1.12.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ed924233272719b5d5a6a0b4d80ef3345fc7e84fc7a382b6232192a9112d38a6"}, + {file = "fastavro-1.12.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3616e2f0e1c9265e92954fa099db79c6e7817356d3ff34f4bcc92699ae99697c"}, + {file = "fastavro-1.12.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:cb0337b42fd3c047fcf0e9b7597bd6ad25868de719f29da81eabb6343f08d399"}, + {file = "fastavro-1.12.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:64961ab15b74b7c168717bbece5660e0f3d457837c3cc9d9145181d011199fa7"}, + {file = "fastavro-1.12.1-cp314-cp314-win_amd64.whl", hash = "sha256:792356d320f6e757e89f7ac9c22f481e546c886454a6709247f43c0dd7058004"}, + {file = "fastavro-1.12.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:120aaf82ac19d60a1016afe410935fe94728752d9c2d684e267e5b7f0e70f6d9"}, + {file = "fastavro-1.12.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b6a3462934b20a74f9ece1daa49c2e4e749bd9a35fa2657b53bf62898fba80f5"}, + {file = 
"fastavro-1.12.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:1f81011d54dd47b12437b51dd93a70a9aa17b61307abf26542fc3c13efbc6c51"}, + {file = "fastavro-1.12.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:43ded16b3f4a9f1a42f5970c2aa618acb23ea59c4fcaa06680bdf470b255e5a8"}, + {file = "fastavro-1.12.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:02281432dcb11c78b3280da996eff61ee0eff39c5de06c6e0fbf19275093e6d4"}, + {file = "fastavro-1.12.1-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4128978b930aaf930332db4b3acc290783183f3be06a241ae4a482f3ed8ce892"}, + {file = "fastavro-1.12.1-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:546ffffda6610fca672f0ed41149808e106d8272bb246aa7539fa8bb6f117f17"}, + {file = "fastavro-1.12.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:a7d840ccd9aacada3ddc80fbcc4ea079b658107fe62e9d289a0de9d54e95d366"}, + {file = "fastavro-1.12.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:3100ad643e7fa658469a2a2db229981c1a000ff16b8037c0b58ce3ec4d2107e8"}, + {file = "fastavro-1.12.1-cp39-cp39-win_amd64.whl", hash = "sha256:a38607444281619eda3a9c1be9f5397634012d1b237142eee1540e810b30ac8b"}, + {file = "fastavro-1.12.1.tar.gz", hash = "sha256:2f285be49e45bc047ab2f6bed040bb349da85db3f3c87880e4b92595ea093b2b"}, +] + +[package.extras] +codecs = ["cramjam", "lz4", "zstandard"] +lz4 = ["lz4"] +snappy = ["cramjam"] +zstandard = ["zstandard"] + +[[package]] +name = "filelock" +version = "3.25.0" +description = "A platform independent file lock." 
+optional = false +python-versions = ">=3.10" +groups = ["main"] +files = [ + {file = "filelock-3.25.0-py3-none-any.whl", hash = "sha256:5ccf8069f7948f494968fc0713c10e5c182a9c9d9eef3a636307a20c2490f047"}, + {file = "filelock-3.25.0.tar.gz", hash = "sha256:8f00faf3abf9dc730a1ffe9c354ae5c04e079ab7d3a683b7c32da5dd05f26af3"}, +] + +[[package]] +name = "firecrawl-py" +version = "0.0.16" +description = "Python SDK for Firecrawl API" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "firecrawl_py-0.0.16-py3-none-any.whl", hash = "sha256:9024f483b501852a6b9c4e6cdfc9e8dde452d922afac357080bb278a0c9c2a26"}, + {file = "firecrawl_py-0.0.16.tar.gz", hash = "sha256:6c662fa0a549bc7f5c0acb704baba6731869ca0451094034264dfc1b4eb086e4"}, +] + +[package.dependencies] +requests = "*" + +[[package]] +name = "frozenlist" +version = "1.8.0" +description = "A list-like structure which implements collections.abc.MutableSequence" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "frozenlist-1.8.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:b37f6d31b3dcea7deb5e9696e529a6aa4a898adc33db82da12e4c60a7c4d2011"}, + {file = "frozenlist-1.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ef2b7b394f208233e471abc541cc6991f907ffd47dc72584acee3147899d6565"}, + {file = "frozenlist-1.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a88f062f072d1589b7b46e951698950e7da00442fc1cacbe17e19e025dc327ad"}, + {file = "frozenlist-1.8.0-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:f57fb59d9f385710aa7060e89410aeb5058b99e62f4d16b08b91986b9a2140c2"}, + {file = "frozenlist-1.8.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:799345ab092bee59f01a915620b5d014698547afd011e691a208637312db9186"}, + {file = "frozenlist-1.8.0-cp310-cp310-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = 
"sha256:c23c3ff005322a6e16f71bf8692fcf4d5a304aaafe1e262c98c6d4adc7be863e"}, + {file = "frozenlist-1.8.0-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:8a76ea0f0b9dfa06f254ee06053d93a600865b3274358ca48a352ce4f0798450"}, + {file = "frozenlist-1.8.0-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:c7366fe1418a6133d5aa824ee53d406550110984de7637d65a178010f759c6ef"}, + {file = "frozenlist-1.8.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:13d23a45c4cebade99340c4165bd90eeb4a56c6d8a9d8aa49568cac19a6d0dc4"}, + {file = "frozenlist-1.8.0-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:e4a3408834f65da56c83528fb52ce7911484f0d1eaf7b761fc66001db1646eff"}, + {file = "frozenlist-1.8.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:42145cd2748ca39f32801dad54aeea10039da6f86e303659db90db1c4b614c8c"}, + {file = "frozenlist-1.8.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:e2de870d16a7a53901e41b64ffdf26f2fbb8917b3e6ebf398098d72c5b20bd7f"}, + {file = "frozenlist-1.8.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:20e63c9493d33ee48536600d1a5c95eefc870cd71e7ab037763d1fbb89cc51e7"}, + {file = "frozenlist-1.8.0-cp310-cp310-win32.whl", hash = "sha256:adbeebaebae3526afc3c96fad434367cafbfd1b25d72369a9e5858453b1bb71a"}, + {file = "frozenlist-1.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:667c3777ca571e5dbeb76f331562ff98b957431df140b54c85fd4d52eea8d8f6"}, + {file = "frozenlist-1.8.0-cp310-cp310-win_arm64.whl", hash = "sha256:80f85f0a7cc86e7a54c46d99c9e1318ff01f4687c172ede30fd52d19d1da1c8e"}, + {file = "frozenlist-1.8.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:09474e9831bc2b2199fad6da3c14c7b0fbdd377cce9d3d77131be28906cb7d84"}, + {file = "frozenlist-1.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:17c883ab0ab67200b5f964d2b9ed6b00971917d5d8a92df149dc2c9779208ee9"}, + {file = "frozenlist-1.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:fa47e444b8ba08fffd1c18e8cdb9a75db1b6a27f17507522834ad13ed5922b93"}, + {file = "frozenlist-1.8.0-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:2552f44204b744fba866e573be4c1f9048d6a324dfe14475103fd51613eb1d1f"}, + {file = "frozenlist-1.8.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:957e7c38f250991e48a9a73e6423db1bb9dd14e722a10f6b8bb8e16a0f55f695"}, + {file = "frozenlist-1.8.0-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:8585e3bb2cdea02fc88ffa245069c36555557ad3609e83be0ec71f54fd4abb52"}, + {file = "frozenlist-1.8.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:edee74874ce20a373d62dc28b0b18b93f645633c2943fd90ee9d898550770581"}, + {file = "frozenlist-1.8.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:c9a63152fe95756b85f31186bddf42e4c02c6321207fd6601a1c89ebac4fe567"}, + {file = "frozenlist-1.8.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b6db2185db9be0a04fecf2f241c70b63b1a242e2805be291855078f2b404dd6b"}, + {file = "frozenlist-1.8.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:f4be2e3d8bc8aabd566f8d5b8ba7ecc09249d74ba3c9ed52e54dc23a293f0b92"}, + {file = "frozenlist-1.8.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:c8d1634419f39ea6f5c427ea2f90ca85126b54b50837f31497f3bf38266e853d"}, + {file = "frozenlist-1.8.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:1a7fa382a4a223773ed64242dbe1c9c326ec09457e6b8428efb4118c685c3dfd"}, + {file = "frozenlist-1.8.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:11847b53d722050808926e785df837353bd4d75f1d494377e59b23594d834967"}, + {file = "frozenlist-1.8.0-cp311-cp311-win32.whl", hash = "sha256:27c6e8077956cf73eadd514be8fb04d77fc946a7fe9f7fe167648b0b9085cc25"}, + {file = "frozenlist-1.8.0-cp311-cp311-win_amd64.whl", hash = 
"sha256:ac913f8403b36a2c8610bbfd25b8013488533e71e62b4b4adce9c86c8cea905b"}, + {file = "frozenlist-1.8.0-cp311-cp311-win_arm64.whl", hash = "sha256:d4d3214a0f8394edfa3e303136d0575eece0745ff2b47bd2cb2e66dd92d4351a"}, + {file = "frozenlist-1.8.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:78f7b9e5d6f2fdb88cdde9440dc147259b62b9d3b019924def9f6478be254ac1"}, + {file = "frozenlist-1.8.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:229bf37d2e4acdaf808fd3f06e854a4a7a3661e871b10dc1f8f1896a3b05f18b"}, + {file = "frozenlist-1.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f833670942247a14eafbb675458b4e61c82e002a148f49e68257b79296e865c4"}, + {file = "frozenlist-1.8.0-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:494a5952b1c597ba44e0e78113a7266e656b9794eec897b19ead706bd7074383"}, + {file = "frozenlist-1.8.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:96f423a119f4777a4a056b66ce11527366a8bb92f54e541ade21f2374433f6d4"}, + {file = "frozenlist-1.8.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3462dd9475af2025c31cc61be6652dfa25cbfb56cbbf52f4ccfe029f38decaf8"}, + {file = "frozenlist-1.8.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c4c800524c9cd9bac5166cd6f55285957fcfc907db323e193f2afcd4d9abd69b"}, + {file = "frozenlist-1.8.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d6a5df73acd3399d893dafc71663ad22534b5aa4f94e8a2fabfe856c3c1b6a52"}, + {file = "frozenlist-1.8.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:405e8fe955c2280ce66428b3ca55e12b3c4e9c336fb2103a4937e891c69a4a29"}, + {file = "frozenlist-1.8.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:908bd3f6439f2fef9e85031b59fd4f1297af54415fb60e4254a95f75b3cab3f3"}, + {file = 
"frozenlist-1.8.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:294e487f9ec720bd8ffcebc99d575f7eff3568a08a253d1ee1a0378754b74143"}, + {file = "frozenlist-1.8.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:74c51543498289c0c43656701be6b077f4b265868fa7f8a8859c197006efb608"}, + {file = "frozenlist-1.8.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:776f352e8329135506a1d6bf16ac3f87bc25b28e765949282dcc627af36123aa"}, + {file = "frozenlist-1.8.0-cp312-cp312-win32.whl", hash = "sha256:433403ae80709741ce34038da08511d4a77062aa924baf411ef73d1146e74faf"}, + {file = "frozenlist-1.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:34187385b08f866104f0c0617404c8eb08165ab1272e884abc89c112e9c00746"}, + {file = "frozenlist-1.8.0-cp312-cp312-win_arm64.whl", hash = "sha256:fe3c58d2f5db5fbd18c2987cba06d51b0529f52bc3a6cdc33d3f4eab725104bd"}, + {file = "frozenlist-1.8.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8d92f1a84bb12d9e56f818b3a746f3efba93c1b63c8387a73dde655e1e42282a"}, + {file = "frozenlist-1.8.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:96153e77a591c8adc2ee805756c61f59fef4cf4073a9275ee86fe8cba41241f7"}, + {file = "frozenlist-1.8.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f21f00a91358803399890ab167098c131ec2ddd5f8f5fd5fe9c9f2c6fcd91e40"}, + {file = "frozenlist-1.8.0-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:fb30f9626572a76dfe4293c7194a09fb1fe93ba94c7d4f720dfae3b646b45027"}, + {file = "frozenlist-1.8.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:eaa352d7047a31d87dafcacbabe89df0aa506abb5b1b85a2fb91bc3faa02d822"}, + {file = "frozenlist-1.8.0-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:03ae967b4e297f58f8c774c7eabcce57fe3c2434817d4385c50661845a058121"}, + {file = "frozenlist-1.8.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", 
hash = "sha256:f6292f1de555ffcc675941d65fffffb0a5bcd992905015f85d0592201793e0e5"}, + {file = "frozenlist-1.8.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:29548f9b5b5e3460ce7378144c3010363d8035cea44bc0bf02d57f5a685e084e"}, + {file = "frozenlist-1.8.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ec3cc8c5d4084591b4237c0a272cc4f50a5b03396a47d9caaf76f5d7b38a4f11"}, + {file = "frozenlist-1.8.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:517279f58009d0b1f2e7c1b130b377a349405da3f7621ed6bfae50b10adf20c1"}, + {file = "frozenlist-1.8.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:db1e72ede2d0d7ccb213f218df6a078a9c09a7de257c2fe8fcef16d5925230b1"}, + {file = "frozenlist-1.8.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:b4dec9482a65c54a5044486847b8a66bf10c9cb4926d42927ec4e8fd5db7fed8"}, + {file = "frozenlist-1.8.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:21900c48ae04d13d416f0e1e0c4d81f7931f73a9dfa0b7a8746fb2fe7dd970ed"}, + {file = "frozenlist-1.8.0-cp313-cp313-win32.whl", hash = "sha256:8b7b94a067d1c504ee0b16def57ad5738701e4ba10cec90529f13fa03c833496"}, + {file = "frozenlist-1.8.0-cp313-cp313-win_amd64.whl", hash = "sha256:878be833caa6a3821caf85eb39c5ba92d28e85df26d57afb06b35b2efd937231"}, + {file = "frozenlist-1.8.0-cp313-cp313-win_arm64.whl", hash = "sha256:44389d135b3ff43ba8cc89ff7f51f5a0bb6b63d829c8300f79a2fe4fe61bcc62"}, + {file = "frozenlist-1.8.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:e25ac20a2ef37e91c1b39938b591457666a0fa835c7783c3a8f33ea42870db94"}, + {file = "frozenlist-1.8.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:07cdca25a91a4386d2e76ad992916a85038a9b97561bf7a3fd12d5d9ce31870c"}, + {file = "frozenlist-1.8.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:4e0c11f2cc6717e0a741f84a527c52616140741cd812a50422f83dc31749fb52"}, + {file = "frozenlist-1.8.0-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", 
hash = "sha256:b3210649ee28062ea6099cfda39e147fa1bc039583c8ee4481cb7811e2448c51"}, + {file = "frozenlist-1.8.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:581ef5194c48035a7de2aefc72ac6539823bb71508189e5de01d60c9dcd5fa65"}, + {file = "frozenlist-1.8.0-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3ef2d026f16a2b1866e1d86fc4e1291e1ed8a387b2c333809419a2f8b3a77b82"}, + {file = "frozenlist-1.8.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:5500ef82073f599ac84d888e3a8c1f77ac831183244bfd7f11eaa0289fb30714"}, + {file = "frozenlist-1.8.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:50066c3997d0091c411a66e710f4e11752251e6d2d73d70d8d5d4c76442a199d"}, + {file = "frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:5c1c8e78426e59b3f8005e9b19f6ff46e5845895adbde20ece9218319eca6506"}, + {file = "frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:eefdba20de0d938cec6a89bd4d70f346a03108a19b9df4248d3cf0d88f1b0f51"}, + {file = "frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:cf253e0e1c3ceb4aaff6df637ce033ff6535fb8c70a764a8f46aafd3d6ab798e"}, + {file = "frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:032efa2674356903cd0261c4317a561a6850f3ac864a63fc1583147fb05a79b0"}, + {file = "frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6da155091429aeba16851ecb10a9104a108bcd32f6c1642867eadaee401c1c41"}, + {file = "frozenlist-1.8.0-cp313-cp313t-win32.whl", hash = "sha256:0f96534f8bfebc1a394209427d0f8a63d343c9779cda6fc25e8e121b5fd8555b"}, + {file = "frozenlist-1.8.0-cp313-cp313t-win_amd64.whl", hash = "sha256:5d63a068f978fc69421fb0e6eb91a9603187527c86b7cd3f534a5b77a592b888"}, + {file = "frozenlist-1.8.0-cp313-cp313t-win_arm64.whl", hash = 
"sha256:bf0a7e10b077bf5fb9380ad3ae8ce20ef919a6ad93b4552896419ac7e1d8e042"}, + {file = "frozenlist-1.8.0-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:cee686f1f4cadeb2136007ddedd0aaf928ab95216e7691c63e50a8ec066336d0"}, + {file = "frozenlist-1.8.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:119fb2a1bd47307e899c2fac7f28e85b9a543864df47aa7ec9d3c1b4545f096f"}, + {file = "frozenlist-1.8.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:4970ece02dbc8c3a92fcc5228e36a3e933a01a999f7094ff7c23fbd2beeaa67c"}, + {file = "frozenlist-1.8.0-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:cba69cb73723c3f329622e34bdbf5ce1f80c21c290ff04256cff1cd3c2036ed2"}, + {file = "frozenlist-1.8.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:778a11b15673f6f1df23d9586f83c4846c471a8af693a22e066508b77d201ec8"}, + {file = "frozenlist-1.8.0-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:0325024fe97f94c41c08872db482cf8ac4800d80e79222c6b0b7b162d5b13686"}, + {file = "frozenlist-1.8.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:97260ff46b207a82a7567b581ab4190bd4dfa09f4db8a8b49d1a958f6aa4940e"}, + {file = "frozenlist-1.8.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:54b2077180eb7f83dd52c40b2750d0a9f175e06a42e3213ce047219de902717a"}, + {file = "frozenlist-1.8.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:2f05983daecab868a31e1da44462873306d3cbfd76d1f0b5b69c473d21dbb128"}, + {file = "frozenlist-1.8.0-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:33f48f51a446114bc5d251fb2954ab0164d5be02ad3382abcbfe07e2531d650f"}, + {file = "frozenlist-1.8.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:154e55ec0655291b5dd1b8731c637ecdb50975a2ae70c606d100750a540082f7"}, + {file = 
"frozenlist-1.8.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:4314debad13beb564b708b4a496020e5306c7333fa9a3ab90374169a20ffab30"}, + {file = "frozenlist-1.8.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:073f8bf8becba60aa931eb3bc420b217bb7d5b8f4750e6f8b3be7f3da85d38b7"}, + {file = "frozenlist-1.8.0-cp314-cp314-win32.whl", hash = "sha256:bac9c42ba2ac65ddc115d930c78d24ab8d4f465fd3fc473cdedfccadb9429806"}, + {file = "frozenlist-1.8.0-cp314-cp314-win_amd64.whl", hash = "sha256:3e0761f4d1a44f1d1a47996511752cf3dcec5bbdd9cc2b4fe595caf97754b7a0"}, + {file = "frozenlist-1.8.0-cp314-cp314-win_arm64.whl", hash = "sha256:d1eaff1d00c7751b7c6662e9c5ba6eb2c17a2306ba5e2a37f24ddf3cc953402b"}, + {file = "frozenlist-1.8.0-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:d3bb933317c52d7ea5004a1c442eef86f426886fba134ef8cf4226ea6ee1821d"}, + {file = "frozenlist-1.8.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:8009897cdef112072f93a0efdce29cd819e717fd2f649ee3016efd3cd885a7ed"}, + {file = "frozenlist-1.8.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:2c5dcbbc55383e5883246d11fd179782a9d07a986c40f49abe89ddf865913930"}, + {file = "frozenlist-1.8.0-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:39ecbc32f1390387d2aa4f5a995e465e9e2f79ba3adcac92d68e3e0afae6657c"}, + {file = "frozenlist-1.8.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:92db2bf818d5cc8d9c1f1fc56b897662e24ea5adb36ad1f1d82875bd64e03c24"}, + {file = "frozenlist-1.8.0-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:2dc43a022e555de94c3b68a4ef0b11c4f747d12c024a520c7101709a2144fb37"}, + {file = "frozenlist-1.8.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:cb89a7f2de3602cfed448095bab3f178399646ab7c61454315089787df07733a"}, + {file = 
"frozenlist-1.8.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:33139dc858c580ea50e7e60a1b0ea003efa1fd42e6ec7fdbad78fff65fad2fd2"}, + {file = "frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:168c0969a329b416119507ba30b9ea13688fafffac1b7822802537569a1cb0ef"}, + {file = "frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:28bd570e8e189d7f7b001966435f9dac6718324b5be2990ac496cf1ea9ddb7fe"}, + {file = "frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:b2a095d45c5d46e5e79ba1e5b9cb787f541a8dee0433836cea4b96a2c439dcd8"}, + {file = "frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:eab8145831a0d56ec9c4139b6c3e594c7a83c2c8be25d5bcf2d86136a532287a"}, + {file = "frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:974b28cf63cc99dfb2188d8d222bc6843656188164848c4f679e63dae4b0708e"}, + {file = "frozenlist-1.8.0-cp314-cp314t-win32.whl", hash = "sha256:342c97bf697ac5480c0a7ec73cd700ecfa5a8a40ac923bd035484616efecc2df"}, + {file = "frozenlist-1.8.0-cp314-cp314t-win_amd64.whl", hash = "sha256:06be8f67f39c8b1dc671f5d83aaefd3358ae5cdcf8314552c57e7ed3e6475bdd"}, + {file = "frozenlist-1.8.0-cp314-cp314t-win_arm64.whl", hash = "sha256:102e6314ca4da683dca92e3b1355490fed5f313b768500084fbe6371fddfdb79"}, + {file = "frozenlist-1.8.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:d8b7138e5cd0647e4523d6685b0eac5d4be9a184ae9634492f25c6eb38c12a47"}, + {file = "frozenlist-1.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a6483e309ca809f1efd154b4d37dc6d9f61037d6c6a81c2dc7a15cb22c8c5dca"}, + {file = "frozenlist-1.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1b9290cf81e95e93fdf90548ce9d3c1211cf574b8e3f4b3b7cb0537cf2227068"}, + {file = "frozenlist-1.8.0-cp39-cp39-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:59a6a5876ca59d1b63af8cd5e7ffffb024c3dc1e9cf9301b21a2e76286505c95"}, + {file = 
"frozenlist-1.8.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6dc4126390929823e2d2d9dc79ab4046ed74680360fc5f38b585c12c66cdf459"}, + {file = "frozenlist-1.8.0-cp39-cp39-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:332db6b2563333c5671fecacd085141b5800cb866be16d5e3eb15a2086476675"}, + {file = "frozenlist-1.8.0-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:9ff15928d62a0b80bb875655c39bf517938c7d589554cbd2669be42d97c2cb61"}, + {file = "frozenlist-1.8.0-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:7bf6cdf8e07c8151fba6fe85735441240ec7f619f935a5205953d58009aef8c6"}, + {file = "frozenlist-1.8.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:48e6d3f4ec5c7273dfe83ff27c91083c6c9065af655dc2684d2c200c94308bb5"}, + {file = "frozenlist-1.8.0-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:1a7607e17ad33361677adcd1443edf6f5da0ce5e5377b798fba20fae194825f3"}, + {file = "frozenlist-1.8.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:5a3a935c3a4e89c733303a2d5a7c257ea44af3a56c8202df486b7f5de40f37e1"}, + {file = "frozenlist-1.8.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:940d4a017dbfed9daf46a3b086e1d2167e7012ee297fef9e1c545c4d022f5178"}, + {file = "frozenlist-1.8.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:b9be22a69a014bc47e78072d0ecae716f5eb56c15238acca0f43d6eb8e4a5bda"}, + {file = "frozenlist-1.8.0-cp39-cp39-win32.whl", hash = "sha256:1aa77cb5697069af47472e39612976ed05343ff2e84a3dcf15437b232cbfd087"}, + {file = "frozenlist-1.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:7398c222d1d405e796970320036b1b563892b65809d9e5261487bb2c7f7b5c6a"}, + {file = "frozenlist-1.8.0-cp39-cp39-win_arm64.whl", hash = "sha256:b4f3b365f31c6cd4af24545ca0a244a53688cad8834e32f56831c4923b50a103"}, + {file = "frozenlist-1.8.0-py3-none-any.whl", hash = 
"sha256:0c18a16eab41e82c295618a77502e17b195883241c563b00f0aa5106fc4eaa0d"}, + {file = "frozenlist-1.8.0.tar.gz", hash = "sha256:3ede829ed8d842f6cd48fc7081d7a41001a56f1f38603f9d49bf3020d59a31ad"}, +] + +[[package]] +name = "fsspec" +version = "2026.2.0" +description = "File-system specification" +optional = false +python-versions = ">=3.10" +groups = ["main"] +files = [ + {file = "fsspec-2026.2.0-py3-none-any.whl", hash = "sha256:98de475b5cb3bd66bedd5c4679e87b4fdfe1a3bf4d707b151b3c07e58c9a2437"}, + {file = "fsspec-2026.2.0.tar.gz", hash = "sha256:6544e34b16869f5aacd5b90bdf1a71acb37792ea3ddf6125ee69a22a53fb8bff"}, +] + +[package.extras] +abfs = ["adlfs"] +adl = ["adlfs"] +arrow = ["pyarrow (>=1)"] +dask = ["dask", "distributed"] +dev = ["pre-commit", "ruff (>=0.5)"] +doc = ["numpydoc", "sphinx", "sphinx-design", "sphinx-rtd-theme", "yarl"] +dropbox = ["dropbox", "dropboxdrivefs", "requests"] +full = ["adlfs", "aiohttp (!=4.0.0a0,!=4.0.0a1)", "dask", "distributed", "dropbox", "dropboxdrivefs", "fusepy", "gcsfs (>2024.2.0)", "libarchive-c", "ocifs", "panel", "paramiko", "pyarrow (>=1)", "pygit2", "requests", "s3fs (>2024.2.0)", "smbprotocol", "tqdm"] +fuse = ["fusepy"] +gcs = ["gcsfs (>2024.2.0)"] +git = ["pygit2"] +github = ["requests"] +gs = ["gcsfs"] +gui = ["panel"] +hdfs = ["pyarrow (>=1)"] +http = ["aiohttp (!=4.0.0a0,!=4.0.0a1)"] +libarchive = ["libarchive-c"] +oci = ["ocifs"] +s3 = ["s3fs (>2024.2.0)"] +sftp = ["paramiko"] +smb = ["smbprotocol"] +ssh = ["paramiko"] +test = ["aiohttp (!=4.0.0a0,!=4.0.0a1)", "numpy", "pytest", "pytest-asyncio (!=0.22.0)", "pytest-benchmark", "pytest-cov", "pytest-mock", "pytest-recording", "pytest-rerunfailures", "requests"] +test-downstream = ["aiobotocore (>=2.5.4,<3.0.0)", "dask[dataframe,test]", "moto[server] (>4,<5)", "pytest-timeout", "xarray"] +test-full = ["adlfs", "aiohttp (!=4.0.0a0,!=4.0.0a1)", "backports-zstd ; python_version < \"3.14\"", "cloudpickle", "dask", "distributed", "dropbox", "dropboxdrivefs", 
"fastparquet", "fusepy", "gcsfs", "jinja2", "kerchunk", "libarchive-c", "lz4", "notebook", "numpy", "ocifs", "pandas (<3.0.0)", "panel", "paramiko", "pyarrow", "pyarrow (>=1)", "pyftpdlib", "pygit2", "pytest", "pytest-asyncio (!=0.22.0)", "pytest-benchmark", "pytest-cov", "pytest-mock", "pytest-recording", "pytest-rerunfailures", "python-snappy", "requests", "smbprotocol", "tqdm", "urllib3", "zarr", "zstandard ; python_version < \"3.14\""] +tqdm = ["tqdm"] + +[[package]] +name = "greenlet" +version = "3.3.2" +description = "Lightweight in-process concurrent programming" +optional = false +python-versions = ">=3.10" +groups = ["main"] +files = [ + {file = "greenlet-3.3.2-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:9bc885b89709d901859cf95179ec9f6bb67a3d2bb1f0e88456461bd4b7f8fd0d"}, + {file = "greenlet-3.3.2-cp310-cp310-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b568183cf65b94919be4438dc28416b234b678c608cafac8874dfeeb2a9bbe13"}, + {file = "greenlet-3.3.2-cp310-cp310-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:527fec58dc9f90efd594b9b700662ed3fb2493c2122067ac9c740d98080a620e"}, + {file = "greenlet-3.3.2-cp310-cp310-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:508c7f01f1791fbc8e011bd508f6794cb95397fdb198a46cb6635eb5b78d85a7"}, + {file = "greenlet-3.3.2-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ad0c8917dd42a819fe77e6bdfcb84e3379c0de956469301d9fd36427a1ca501f"}, + {file = "greenlet-3.3.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:97245cc10e5515dbc8c3104b2928f7f02b6813002770cfaffaf9a6e0fc2b94ef"}, + {file = "greenlet-3.3.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8c1fdd7d1b309ff0da81d60a9688a8bd044ac4e18b250320a96fc68d31c209ca"}, + {file = "greenlet-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:5d0e35379f93a6d0222de929a25ab47b5eb35b5ef4721c2b9cbcc4036129ff1f"}, + {file = "greenlet-3.3.2-cp311-cp311-macosx_11_0_universal2.whl", hash = 
"sha256:c56692189a7d1c7606cb794be0a8381470d95c57ce5be03fb3d0ef57c7853b86"}, + {file = "greenlet-3.3.2-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1ebd458fa8285960f382841da585e02201b53a5ec2bac6b156fc623b5ce4499f"}, + {file = "greenlet-3.3.2-cp311-cp311-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a443358b33c4ec7b05b79a7c8b466f5d275025e750298be7340f8fc63dff2a55"}, + {file = "greenlet-3.3.2-cp311-cp311-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:4375a58e49522698d3e70cc0b801c19433021b5c37686f7ce9c65b0d5c8677d2"}, + {file = "greenlet-3.3.2-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8e2cd90d413acbf5e77ae41e5d3c9b3ac1d011a756d7284d7f3f2b806bbd6358"}, + {file = "greenlet-3.3.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:442b6057453c8cb29b4fb36a2ac689382fc71112273726e2423f7f17dc73bf99"}, + {file = "greenlet-3.3.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:45abe8eb6339518180d5a7fa47fa01945414d7cca5ecb745346fc6a87d2750be"}, + {file = "greenlet-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:1e692b2dae4cc7077cbb11b47d258533b48c8fde69a33d0d8a82e2fe8d8531d5"}, + {file = "greenlet-3.3.2-cp311-cp311-win_arm64.whl", hash = "sha256:02b0a8682aecd4d3c6c18edf52bc8e51eacdd75c8eac52a790a210b06aa295fd"}, + {file = "greenlet-3.3.2-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:ac8d61d4343b799d1e526db579833d72f23759c71e07181c2d2944e429eb09cd"}, + {file = "greenlet-3.3.2-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3ceec72030dae6ac0c8ed7591b96b70410a8be370b6a477b1dbc072856ad02bd"}, + {file = "greenlet-3.3.2-cp312-cp312-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a2a5be83a45ce6188c045bcc44b0ee037d6a518978de9a5d97438548b953a1ac"}, + {file = "greenlet-3.3.2-cp312-cp312-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:ae9e21c84035c490506c17002f5c8ab25f980205c3e61ddb3a2a2a2e6c411fcb"}, + 
{file = "greenlet-3.3.2-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:43e99d1749147ac21dde49b99c9abffcbc1e2d55c67501465ef0930d6e78e070"}, + {file = "greenlet-3.3.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:4c956a19350e2c37f2c48b336a3afb4bff120b36076d9d7fb68cb44e05d95b79"}, + {file = "greenlet-3.3.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:6c6f8ba97d17a1e7d664151284cb3315fc5f8353e75221ed4324f84eb162b395"}, + {file = "greenlet-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:34308836d8370bddadb41f5a7ce96879b72e2fdfb4e87729330c6ab52376409f"}, + {file = "greenlet-3.3.2-cp312-cp312-win_arm64.whl", hash = "sha256:d3a62fa76a32b462a97198e4c9e99afb9ab375115e74e9a83ce180e7a496f643"}, + {file = "greenlet-3.3.2-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:aa6ac98bdfd716a749b84d4034486863fd81c3abde9aa3cf8eff9127981a4ae4"}, + {file = "greenlet-3.3.2-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ab0c7e7901a00bc0a7284907273dc165b32e0d109a6713babd04471327ff7986"}, + {file = "greenlet-3.3.2-cp313-cp313-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:d248d8c23c67d2291ffd47af766e2a3aa9fa1c6703155c099feb11f526c63a92"}, + {file = "greenlet-3.3.2-cp313-cp313-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:ccd21bb86944ca9be6d967cf7691e658e43417782bce90b5d2faeda0ff78a7dd"}, + {file = "greenlet-3.3.2-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b6997d360a4e6a4e936c0f9625b1c20416b8a0ea18a8e19cabbefc712e7397ab"}, + {file = "greenlet-3.3.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:64970c33a50551c7c50491671265d8954046cb6e8e2999aacdd60e439b70418a"}, + {file = "greenlet-3.3.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:1a9172f5bf6bd88e6ba5a84e0a68afeac9dc7b6b412b245dd64f52d83c81e55b"}, + {file = "greenlet-3.3.2-cp313-cp313-win_amd64.whl", hash = 
"sha256:a7945dd0eab63ded0a48e4dcade82939783c172290a7903ebde9e184333ca124"}, + {file = "greenlet-3.3.2-cp313-cp313-win_arm64.whl", hash = "sha256:394ead29063ee3515b4e775216cb756b2e3b4a7e55ae8fd884f17fa579e6b327"}, + {file = "greenlet-3.3.2-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:8d1658d7291f9859beed69a776c10822a0a799bc4bfe1bd4272bb60e62507dab"}, + {file = "greenlet-3.3.2-cp314-cp314-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:18cb1b7337bca281915b3c5d5ae19f4e76d35e1df80f4ad3c1a7be91fadf1082"}, + {file = "greenlet-3.3.2-cp314-cp314-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c2e47408e8ce1c6f1ceea0dffcdf6ebb85cc09e55c7af407c99f1112016e45e9"}, + {file = "greenlet-3.3.2-cp314-cp314-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:e3cb43ce200f59483eb82949bf1835a99cf43d7571e900d7c8d5c62cdf25d2f9"}, + {file = "greenlet-3.3.2-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:63d10328839d1973e5ba35e98cccbca71b232b14051fd957b6f8b6e8e80d0506"}, + {file = "greenlet-3.3.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:8e4ab3cfb02993c8cc248ea73d7dae6cec0253e9afa311c9b37e603ca9fad2ce"}, + {file = "greenlet-3.3.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:94ad81f0fd3c0c0681a018a976e5c2bd2ca2d9d94895f23e7bb1af4e8af4e2d5"}, + {file = "greenlet-3.3.2-cp314-cp314-win_amd64.whl", hash = "sha256:8c4dd0f3997cf2512f7601563cc90dfb8957c0cff1e3a1b23991d4ea1776c492"}, + {file = "greenlet-3.3.2-cp314-cp314-win_arm64.whl", hash = "sha256:cd6f9e2bbd46321ba3bbb4c8a15794d32960e3b0ae2cc4d49a1a53d314805d71"}, + {file = "greenlet-3.3.2-cp314-cp314t-macosx_11_0_universal2.whl", hash = "sha256:e26e72bec7ab387ac80caa7496e0f908ff954f31065b0ffc1f8ecb1338b11b54"}, + {file = "greenlet-3.3.2-cp314-cp314t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8b466dff7a4ffda6ca975979bab80bdadde979e29fc947ac3be4451428d8b0e4"}, + {file = 
"greenlet-3.3.2-cp314-cp314t-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:b8bddc5b73c9720bea487b3bffdb1840fe4e3656fba3bd40aa1489e9f37877ff"}, + {file = "greenlet-3.3.2-cp314-cp314t-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:59b3e2c40f6706b05a9cd299c836c6aa2378cabe25d021acd80f13abf81181cf"}, + {file = "greenlet-3.3.2-cp314-cp314t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b26b0f4428b871a751968285a1ac9648944cea09807177ac639b030bddebcea4"}, + {file = "greenlet-3.3.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:1fb39a11ee2e4d94be9a76671482be9398560955c9e568550de0224e41104727"}, + {file = "greenlet-3.3.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:20154044d9085151bc309e7689d6f7ba10027f8f5a8c0676ad398b951913d89e"}, + {file = "greenlet-3.3.2-cp314-cp314t-win_amd64.whl", hash = "sha256:c04c5e06ec3e022cbfe2cd4a846e1d4e50087444f875ff6d2c2ad8445495cf1a"}, + {file = "greenlet-3.3.2.tar.gz", hash = "sha256:2eaf067fc6d886931c7962e8c6bede15d2f01965560f3359b27c80bde2d151f2"}, +] + +[package.extras] +docs = ["Sphinx", "furo"] +test = ["objgraph", "psutil", "setuptools"] + +[[package]] +name = "groq" +version = "0.9.0" +description = "The official Python library for the groq API" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "groq-0.9.0-py3-none-any.whl", hash = "sha256:d0e46f4ad645504672bb09c8100af3ced3a7db0d5119dc13e4aca535fc455874"}, + {file = "groq-0.9.0.tar.gz", hash = "sha256:130ed5e35d3acfaab46b9e7a078eeaebf91052f4a9d71f86f87fb319b5fec332"}, +] + +[package.dependencies] +anyio = ">=3.5.0,<5" +distro = ">=1.7.0,<2" +httpx = ">=0.23.0,<1" +pydantic = ">=1.9.0,<3" +sniffio = "*" +typing-extensions = ">=4.7,<5" + +[[package]] +name = "grpcio" +version = "1.78.0" +description = "HTTP/2-based RPC framework" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "grpcio-1.78.0-cp310-cp310-linux_armv7l.whl", hash = 
"sha256:7cc47943d524ee0096f973e1081cb8f4f17a4615f2116882a5f1416e4cfe92b5"}, + {file = "grpcio-1.78.0-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:c3f293fdc675ccba4db5a561048cca627b5e7bd1c8a6973ffedabe7d116e22e2"}, + {file = "grpcio-1.78.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:10a9a644b5dd5aec3b82b5b0b90d41c0fa94c85ef42cb42cf78a23291ddb5e7d"}, + {file = "grpcio-1.78.0-cp310-cp310-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:4c5533d03a6cbd7f56acfc9cfb44ea64f63d29091e40e44010d34178d392d7eb"}, + {file = "grpcio-1.78.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:ff870aebe9a93a85283837801d35cd5f8814fe2ad01e606861a7fb47c762a2b7"}, + {file = "grpcio-1.78.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:391e93548644e6b2726f1bb84ed60048d4bcc424ce5e4af0843d28ca0b754fec"}, + {file = "grpcio-1.78.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:df2c8f3141f7cbd112a6ebbd760290b5849cda01884554f7c67acc14e7b1758a"}, + {file = "grpcio-1.78.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:bd8cb8026e5f5b50498a3c4f196f57f9db344dad829ffae16b82e4fdbaea2813"}, + {file = "grpcio-1.78.0-cp310-cp310-win32.whl", hash = "sha256:f8dff3d9777e5d2703a962ee5c286c239bf0ba173877cc68dc02c17d042e29de"}, + {file = "grpcio-1.78.0-cp310-cp310-win_amd64.whl", hash = "sha256:94f95cf5d532d0e717eed4fc1810e8e6eded04621342ec54c89a7c2f14b581bf"}, + {file = "grpcio-1.78.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:2777b783f6c13b92bd7b716667452c329eefd646bfb3f2e9dabea2e05dbd34f6"}, + {file = "grpcio-1.78.0-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:9dca934f24c732750389ce49d638069c3892ad065df86cb465b3fa3012b70c9e"}, + {file = "grpcio-1.78.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:459ab414b35f4496138d0ecd735fed26f1318af5e52cb1efbc82a09f0d5aa911"}, + {file = "grpcio-1.78.0-cp311-cp311-manylinux2014_i686.manylinux_2_17_i686.whl", hash = 
"sha256:082653eecbdf290e6e3e2c276ab2c54b9e7c299e07f4221872380312d8cf395e"}, + {file = "grpcio-1.78.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:85f93781028ec63f383f6bc90db785a016319c561cc11151fbb7b34e0d012303"}, + {file = "grpcio-1.78.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:f12857d24d98441af6a1d5c87442d624411db486f7ba12550b07788f74b67b04"}, + {file = "grpcio-1.78.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5397fff416b79e4b284959642a4e95ac4b0f1ece82c9993658e0e477d40551ec"}, + {file = "grpcio-1.78.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:fbe6e89c7ffb48518384068321621b2a69cab509f58e40e4399fdd378fa6d074"}, + {file = "grpcio-1.78.0-cp311-cp311-win32.whl", hash = "sha256:6092beabe1966a3229f599d7088b38dfc8ffa1608b5b5cdda31e591e6500f856"}, + {file = "grpcio-1.78.0-cp311-cp311-win_amd64.whl", hash = "sha256:1afa62af6e23f88629f2b29ec9e52ec7c65a7176c1e0a83292b93c76ca882558"}, + {file = "grpcio-1.78.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:f9ab915a267fc47c7e88c387a3a28325b58c898e23d4995f765728f4e3dedb97"}, + {file = "grpcio-1.78.0-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:3f8904a8165ab21e07e58bf3e30a73f4dffc7a1e0dbc32d51c61b5360d26f43e"}, + {file = "grpcio-1.78.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:859b13906ce098c0b493af92142ad051bf64c7870fa58a123911c88606714996"}, + {file = "grpcio-1.78.0-cp312-cp312-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:b2342d87af32790f934a79c3112641e7b27d63c261b8b4395350dad43eff1dc7"}, + {file = "grpcio-1.78.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:12a771591ae40bc65ba67048fa52ef4f0e6db8279e595fd349f9dfddeef571f9"}, + {file = "grpcio-1.78.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:185dea0d5260cbb2d224c507bf2a5444d5abbb1fa3594c1ed7e4c709d5eb8383"}, + {file = "grpcio-1.78.0-cp312-cp312-musllinux_1_2_i686.whl", hash = 
"sha256:51b13f9aed9d59ee389ad666b8c2214cc87b5de258fa712f9ab05f922e3896c6"}, + {file = "grpcio-1.78.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fd5f135b1bd58ab088930b3c613455796dfa0393626a6972663ccdda5b4ac6ce"}, + {file = "grpcio-1.78.0-cp312-cp312-win32.whl", hash = "sha256:94309f498bcc07e5a7d16089ab984d42ad96af1d94b5a4eb966a266d9fcabf68"}, + {file = "grpcio-1.78.0-cp312-cp312-win_amd64.whl", hash = "sha256:9566fe4ababbb2610c39190791e5b829869351d14369603702e890ef3ad2d06e"}, + {file = "grpcio-1.78.0-cp313-cp313-linux_armv7l.whl", hash = "sha256:ce3a90455492bf8bfa38e56fbbe1dbd4f872a3d8eeaf7337dc3b1c8aa28c271b"}, + {file = "grpcio-1.78.0-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:2bf5e2e163b356978b23652c4818ce4759d40f4712ee9ec5a83c4be6f8c23a3a"}, + {file = "grpcio-1.78.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8f2ac84905d12918e4e55a16da17939eb63e433dc11b677267c35568aa63fc84"}, + {file = "grpcio-1.78.0-cp313-cp313-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:b58f37edab4a3881bc6c9bca52670610e0c9ca14e2ea3cf9debf185b870457fb"}, + {file = "grpcio-1.78.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:735e38e176a88ce41840c21bb49098ab66177c64c82426e24e0082500cc68af5"}, + {file = "grpcio-1.78.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:2045397e63a7a0ee7957c25f7dbb36ddc110e0cfb418403d110c0a7a68a844e9"}, + {file = "grpcio-1.78.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:a9f136fbafe7ccf4ac7e8e0c28b31066e810be52d6e344ef954a3a70234e1702"}, + {file = "grpcio-1.78.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:748b6138585379c737adc08aeffd21222abbda1a86a0dca2a39682feb9196c20"}, + {file = "grpcio-1.78.0-cp313-cp313-win32.whl", hash = "sha256:271c73e6e5676afe4fc52907686670c7cea22ab2310b76a59b678403ed40d670"}, + {file = "grpcio-1.78.0-cp313-cp313-win_amd64.whl", hash = "sha256:f2d4e43ee362adfc05994ed479334d5a451ab7bc3f3fee1b796b8ca66895acb4"}, + {file 
= "grpcio-1.78.0-cp314-cp314-linux_armv7l.whl", hash = "sha256:e87cbc002b6f440482b3519e36e1313eb5443e9e9e73d6a52d43bd2004fcfd8e"}, + {file = "grpcio-1.78.0-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:c41bc64626db62e72afec66b0c8a0da76491510015417c127bfc53b2fe6d7f7f"}, + {file = "grpcio-1.78.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8dfffba826efcf366b1e3ccc37e67afe676f290e13a3b48d31a46739f80a8724"}, + {file = "grpcio-1.78.0-cp314-cp314-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:74be1268d1439eaaf552c698cdb11cd594f0c49295ae6bb72c34ee31abbe611b"}, + {file = "grpcio-1.78.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:be63c88b32e6c0f1429f1398ca5c09bc64b0d80950c8bb7807d7d7fb36fb84c7"}, + {file = "grpcio-1.78.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:3c586ac70e855c721bda8f548d38c3ca66ac791dc49b66a8281a1f99db85e452"}, + {file = "grpcio-1.78.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:35eb275bf1751d2ffbd8f57cdbc46058e857cf3971041521b78b7db94bdaf127"}, + {file = "grpcio-1.78.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:207db540302c884b8848036b80db352a832b99dfdf41db1eb554c2c2c7800f65"}, + {file = "grpcio-1.78.0-cp314-cp314-win32.whl", hash = "sha256:57bab6deef2f4f1ca76cc04565df38dc5713ae6c17de690721bdf30cb1e0545c"}, + {file = "grpcio-1.78.0-cp314-cp314-win_amd64.whl", hash = "sha256:dce09d6116df20a96acfdbf85e4866258c3758180e8c49845d6ba8248b6d0bbb"}, + {file = "grpcio-1.78.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:86f85dd7c947baa707078a236288a289044836d4b640962018ceb9cd1f899af5"}, + {file = "grpcio-1.78.0-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:de8cb00d1483a412a06394b8303feec5dcb3b55f81d83aa216dbb6a0b86a94f5"}, + {file = "grpcio-1.78.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:e888474dee2f59ff68130f8a397792d8cb8e17e6b3434339657ba4ee90845a8c"}, + {file = 
"grpcio-1.78.0-cp39-cp39-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:86ce2371bfd7f212cf60d8517e5e854475c2c43ce14aa910e136ace72c6db6c1"}, + {file = "grpcio-1.78.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:b0c689c02947d636bc7fab3e30cc3a3445cca99c834dfb77cd4a6cabfc1c5597"}, + {file = "grpcio-1.78.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:ce7599575eeb25c0f4dc1be59cada6219f3b56176f799627f44088b21381a28a"}, + {file = "grpcio-1.78.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:684083fd383e9dc04c794adb838d4faea08b291ce81f64ecd08e4577c7398adf"}, + {file = "grpcio-1.78.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:ab399ef5e3cd2a721b1038a0f3021001f19c5ab279f145e1146bb0b9f1b2b12c"}, + {file = "grpcio-1.78.0-cp39-cp39-win32.whl", hash = "sha256:f3d6379493e18ad4d39537a82371c5281e153e963cecb13f953ebac155756525"}, + {file = "grpcio-1.78.0-cp39-cp39-win_amd64.whl", hash = "sha256:5361a0630a7fdb58a6a97638ab70e1dae2893c4d08d7aba64ded28bb9e7a29df"}, + {file = "grpcio-1.78.0.tar.gz", hash = "sha256:7382b95189546f375c174f53a5fa873cef91c4b8005faa05cc5b3beea9c4f1c5"}, +] + +[package.dependencies] +typing-extensions = ">=4.12,<5.0" + +[package.extras] +protobuf = ["grpcio-tools (>=1.78.0)"] + +[[package]] +name = "grpcio-tools" +version = "1.78.0" +description = "Protobuf code generator for gRPC" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "grpcio_tools-1.78.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:ea64e38d1caa2b8468b08cb193f5a091d169b6dbfe1c7dac37d746651ab9d84e"}, + {file = "grpcio_tools-1.78.0-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:4003fcd5cbb5d578b06176fd45883a72a8f9203152149b7c680ce28653ad9e3a"}, + {file = "grpcio_tools-1.78.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:fe6b0081775394c61ec633c9ff5dbc18337100eabb2e946b5c83967fe43b2748"}, + {file = 
"grpcio_tools-1.78.0-cp310-cp310-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:7e989ad2cd93db52d7f1a643ecaa156ac55bf0484f1007b485979ce8aef62022"}, + {file = "grpcio_tools-1.78.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:b874991797e96c41a37e563236c3317ed41b915eff25b292b202d6277d30da85"}, + {file = "grpcio_tools-1.78.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:daa8c288b728228377aaf758925692fc6068939d9fa32f92ca13dedcbeb41f33"}, + {file = "grpcio_tools-1.78.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:87e648759b06133199f4bc0c0053e3819f4ec3b900dc399e1097b6065db998b5"}, + {file = "grpcio_tools-1.78.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f3d3ced52bfe39eba3d24f5a8fab4e12d071959384861b41f0c52ca5399d6920"}, + {file = "grpcio_tools-1.78.0-cp310-cp310-win32.whl", hash = "sha256:4bb6ed690d417b821808796221bde079377dff98fdc850ac157ad2f26cda7a36"}, + {file = "grpcio_tools-1.78.0-cp310-cp310-win_amd64.whl", hash = "sha256:0c676d8342fd53bd85a5d5f0d070cd785f93bc040510014708ede6fcb32fada1"}, + {file = "grpcio_tools-1.78.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:6a8b8b7b49f319d29dbcf507f62984fa382d1d10437d75c3f26db5f09c4ac0af"}, + {file = "grpcio_tools-1.78.0-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:d62cf3b68372b0c6d722a6165db41b976869811abeabc19c8522182978d8db10"}, + {file = "grpcio_tools-1.78.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:fa9056742efeaf89d5fe14198af71e5cbc4fbf155d547b89507e19d6025906c6"}, + {file = "grpcio_tools-1.78.0-cp311-cp311-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:e3191af125dcb705aa6bc3856ba81ba99b94121c1b6ebee152e66ea084672831"}, + {file = "grpcio_tools-1.78.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:283239ddbb67ae83fac111c61b25d8527a1dbd355b377cbc8383b79f1329944d"}, + {file = "grpcio_tools-1.78.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:ac977508c0db15301ef36d6c79769ec1a6cc4e3bc75735afca7fe7e360cead3a"}, + {file = "grpcio_tools-1.78.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4ff605e25652a0bd13aa8a73a09bc48669c68170902f5d2bf1468a57d5e78771"}, + {file = "grpcio_tools-1.78.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0197d7b561c79be78ab93d0fe2836c8def470683df594bae3ac89dd8e5c821b2"}, + {file = "grpcio_tools-1.78.0-cp311-cp311-win32.whl", hash = "sha256:28f71f591f7f39555863ced84fcc209cbf4454e85ef957232f43271ee99af577"}, + {file = "grpcio_tools-1.78.0-cp311-cp311-win_amd64.whl", hash = "sha256:5a6de495dabf86a3b40b9a7492994e1232b077af9d63080811838b781abbe4e8"}, + {file = "grpcio_tools-1.78.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:9eb122da57d4cad7d339fc75483116f0113af99e8d2c67f3ef9cae7501d806e4"}, + {file = "grpcio_tools-1.78.0-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:d0c501b8249940b886420e6935045c44cb818fa6f265f4c2b97d5cff9cb5e796"}, + {file = "grpcio_tools-1.78.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:77e5aa2d2a7268d55b1b113f958264681ef1994c970f69d48db7d4683d040f57"}, + {file = "grpcio_tools-1.78.0-cp312-cp312-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:8e3c0b0e6ba5275322ba29a97bf890565a55f129f99a21b121145e9e93a22525"}, + {file = "grpcio_tools-1.78.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:975d4cb48694e20ebd78e1643e5f1cd94cdb6a3d38e677a8e84ae43665aa4790"}, + {file = "grpcio_tools-1.78.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:553ff18c5d52807dedecf25045ae70bad7a3dbba0b27a9a3cdd9bcf0a1b7baec"}, + {file = "grpcio_tools-1.78.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:8c7f5e4af5a84d2e96c862b1a65e958a538237e268d5f8203a3a784340975b51"}, + {file = "grpcio_tools-1.78.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:96183e2b44afc3f9a761e9d0f985c3b44e03e8bb98e626241a6cbfb3b6f7e88f"}, + {file = "grpcio_tools-1.78.0-cp312-cp312-win32.whl", 
hash = "sha256:2250e8424c565a88573f7dc10659a0b92802e68c2a1d57e41872c9b88ccea7a6"}, + {file = "grpcio_tools-1.78.0-cp312-cp312-win_amd64.whl", hash = "sha256:217d1fa29de14d9c567d616ead7cb0fef33cde36010edff5a9390b00d52e5094"}, + {file = "grpcio_tools-1.78.0-cp313-cp313-linux_armv7l.whl", hash = "sha256:2d6de1cc23bdc1baafc23e201b1e48c617b8c1418b4d8e34cebf72141676e5fb"}, + {file = "grpcio_tools-1.78.0-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:2afeaad88040894c76656202ff832cb151bceb05c0e6907e539d129188b1e456"}, + {file = "grpcio_tools-1.78.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:33cc593735c93c03d63efe7a8ba25f3c66f16c52f0651910712490244facad72"}, + {file = "grpcio_tools-1.78.0-cp313-cp313-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:2921d7989c4d83b71f03130ab415fa4d66e6693b8b8a1fcbb7a1c67cff19b812"}, + {file = "grpcio_tools-1.78.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:e6a0df438e82c804c7b95e3f311c97c2f876dcc36376488d5b736b7bcf5a9b45"}, + {file = "grpcio_tools-1.78.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:e9c6070a9500798225191ef25d0055a15d2c01c9c8f2ee7b681fffa99c98c822"}, + {file = "grpcio_tools-1.78.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:394e8b57d85370a62e5b0a4d64c96fcf7568345c345d8590c821814d227ecf1d"}, + {file = "grpcio_tools-1.78.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a3ef700293ab375e111a2909d87434ed0a0b086adf0ce67a8d9cf12ea7765e63"}, + {file = "grpcio_tools-1.78.0-cp313-cp313-win32.whl", hash = "sha256:6993b960fec43a8d840ee5dc20247ef206c1a19587ea49fe5e6cc3d2a09c1585"}, + {file = "grpcio_tools-1.78.0-cp313-cp313-win_amd64.whl", hash = "sha256:275ce3c2978842a8cf9dd88dce954e836e590cf7029649ad5d1145b779039ed5"}, + {file = "grpcio_tools-1.78.0-cp314-cp314-linux_armv7l.whl", hash = "sha256:8b080d0d072e6032708a3a91731b808074d7ab02ca8fb9847b6a011fdce64cd9"}, + {file = 
"grpcio_tools-1.78.0-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:8c0ad8f8f133145cd7008b49cb611a5c6a9d89ab276c28afa17050516e801f79"}, + {file = "grpcio_tools-1.78.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:2f8ea092a7de74c6359335d36f0674d939a3c7e1a550f4c2c9e80e0226de8fe4"}, + {file = "grpcio_tools-1.78.0-cp314-cp314-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:da422985e0cac822b41822f43429c19ecb27c81ffe3126d0b74e77edec452608"}, + {file = "grpcio_tools-1.78.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:4fab1faa3fbcb246263e68da7a8177d73772283f9db063fb8008517480888d26"}, + {file = "grpcio_tools-1.78.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:dd9c094f73f734becae3f20f27d4944d3cd8fb68db7338ee6c58e62fc5c3d99f"}, + {file = "grpcio_tools-1.78.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:2ed51ce6b833068f6c580b73193fc2ec16468e6bc18354bc2f83a58721195a58"}, + {file = "grpcio_tools-1.78.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:05803a5cdafe77c8bdf36aa660ad7a6a1d9e49bc59ce45c1bade2a4698826599"}, + {file = "grpcio_tools-1.78.0-cp314-cp314-win32.whl", hash = "sha256:f7c722e9ce6f11149ac5bddd5056e70aaccfd8168e74e9d34d8b8b588c3f5c7c"}, + {file = "grpcio_tools-1.78.0-cp314-cp314-win_amd64.whl", hash = "sha256:7d58ade518b546120ec8f0a8e006fc8076ae5df151250ebd7e82e9b5e152c229"}, + {file = "grpcio_tools-1.78.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:30b1eef2afb6f2c3deb94525d60aedfea807d4937b5e23ad72600e3f8cd1c768"}, + {file = "grpcio_tools-1.78.0-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:c70b07b2610db3743d831700301eb17a9e1de2818d1f36ad53cb5b8b593a5749"}, + {file = "grpcio_tools-1.78.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:f6d53392eb0f758eaa9ecfa6f9aab1e1f3c9db117a4242c802a30363fdc404d2"}, + {file = "grpcio_tools-1.78.0-cp39-cp39-manylinux2014_i686.manylinux_2_17_i686.whl", hash = 
"sha256:638fa11b4731dce2c662f685c3be0489246e8d2306654eb26ebd71e6a24c4b70"}, + {file = "grpcio_tools-1.78.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:21b31c87cef35af124f1cfb105614725b462656d2684f59d05a6210266b17b9e"}, + {file = "grpcio_tools-1.78.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:b81b4cf356272512172a604d4467af9b373de69cd69e1ac163fb41f7dac33099"}, + {file = "grpcio_tools-1.78.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:5c8ceb32cd818e40739529b3c3143a30c899c247db22a6275c4798dece9a4ae7"}, + {file = "grpcio_tools-1.78.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1872d01f984c85ee49ce581fcaffbcc9c792692b4b5ebf9bba4358fc895c316a"}, + {file = "grpcio_tools-1.78.0-cp39-cp39-win32.whl", hash = "sha256:4eff49de5f8f320ed2a69bbb6bfe512175b1762d736cfce28aca0129939f7252"}, + {file = "grpcio_tools-1.78.0-cp39-cp39-win_amd64.whl", hash = "sha256:6ddf7e7a7d069e7287b9cb68937102efe1686e63117a162d01578ac2839b4acd"}, + {file = "grpcio_tools-1.78.0.tar.gz", hash = "sha256:4b0dd86560274316e155d925158276f8564508193088bc43e20d3f5dff956b2b"}, +] + +[package.dependencies] +grpcio = ">=1.78.0" +protobuf = ">=6.31.1,<7.0.0" +setuptools = ">=77.0.1" + +[[package]] +name = "h11" +version = "0.16.0" +description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86"}, + {file = "h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1"}, +] + +[[package]] +name = "hatchet-sdk" +version = "1.25.2" +description = "This is the official Python SDK for Hatchet, a distributed, fault-tolerant task queue. The SDK allows you to easily integrate Hatchet's task scheduling and workflow orchestration capabilities into your Python applications." 
+optional = false +python-versions = "^3.10" +groups = ["main"] +files = [] +develop = true + +[package.dependencies] +aiohttp = "^3.10.5" +grpcio = "^1.76.0" +grpcio-tools = "^1.76.0" +prometheus-client = ">=0.21.1" +protobuf = "^6.30.2" +pydantic = "^2.6.3" +pydantic-settings = "^2.7.1" +python-dateutil = "^2.9.0.post0" +tenacity = ">=8.4.1" +urllib3 = "^2.6.0" + +[package.extras] +otel = ["opentelemetry-api (>=1.28.0,<2.0.0)", "opentelemetry-distro (>=0.49b0)", "opentelemetry-exporter-otlp (>=1.28.0,<2.0.0)", "opentelemetry-exporter-otlp-proto-http (>=1.28.0,<2.0.0)", "opentelemetry-instrumentation (>=0.49b0)", "opentelemetry-sdk (>=1.28.0,<2.0.0)"] +v0-sdk = [] + +[package.source] +type = "directory" +url = "../../python" + +[[package]] +name = "hf-xet" +version = "1.3.2" +description = "Fast transfer of large files with the Hugging Face Hub." +optional = false +python-versions = ">=3.8" +groups = ["main"] +markers = "platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"arm64\" or platform_machine == \"aarch64\"" +files = [ + {file = "hf_xet-1.3.2-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:335a8f36c55fd35a92d0062f4e9201b4015057e62747b7e7001ffb203c0ee1d2"}, + {file = "hf_xet-1.3.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:c1ae4d3a716afc774e66922f3cac8206bfa707db13f6a7e62dfff74bfc95c9a8"}, + {file = "hf_xet-1.3.2-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d6dbdf231efac0b9b39adcf12a07f0c030498f9212a18e8c50224d0e84ab803d"}, + {file = "hf_xet-1.3.2-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:c1980abfb68ecf6c1c7983379ed7b1e2b49a1aaf1a5aca9acc7d48e5e2e0a961"}, + {file = "hf_xet-1.3.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:1c88fbd90ad0d27c46b77a445f0a436ebaa94e14965c581123b68b1c52f5fd30"}, + {file = "hf_xet-1.3.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = 
"sha256:35b855024ca37f2dd113ac1c08993e997fbe167b9d61f9ef66d3d4f84015e508"}, + {file = "hf_xet-1.3.2-cp313-cp313t-win_amd64.whl", hash = "sha256:31612ba0629046e425ba50375685a2586e11fb9144270ebabd75878c3eaf6378"}, + {file = "hf_xet-1.3.2-cp313-cp313t-win_arm64.whl", hash = "sha256:433c77c9f4e132b562f37d66c9b22c05b5479f243a1f06a120c1c06ce8b1502a"}, + {file = "hf_xet-1.3.2-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:c34e2c7aefad15792d57067c1c89b2b02c1bbaeabd7f8456ae3d07b4bbaf4094"}, + {file = "hf_xet-1.3.2-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:4bc995d6c41992831f762096020dc14a65fdf3963f86ffed580b596d04de32e3"}, + {file = "hf_xet-1.3.2-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:959083c89dee30f7d6f890b36cdadda823386c4de63b1a30384a75bfd2ae995d"}, + {file = "hf_xet-1.3.2-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:cfa760888633b08c01b398d212ce7e8c0d7adac6c86e4b20dfb2397d8acd78ee"}, + {file = "hf_xet-1.3.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:3155a02e083aa21fd733a7485c7c36025e49d5975c8d6bda0453d224dd0b0ac4"}, + {file = "hf_xet-1.3.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:91b1dc03c31cbf733d35dc03df7c5353686233d86af045e716f1e0ea4a2673cf"}, + {file = "hf_xet-1.3.2-cp314-cp314t-win_amd64.whl", hash = "sha256:211f30098512d95e85ad03ae63bd7dd2c4df476558a5095d09f9e38e78cbf674"}, + {file = "hf_xet-1.3.2-cp314-cp314t-win_arm64.whl", hash = "sha256:4a6817c41de7c48ed9270da0b02849347e089c5ece9a0e72ae4f4b3a57617f82"}, + {file = "hf_xet-1.3.2-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:f93b7595f1d8fefddfede775c18b5c9256757824f7f6832930b49858483cd56f"}, + {file = "hf_xet-1.3.2-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:a85d3d43743174393afe27835bde0cd146e652b5fcfdbcd624602daef2ef3259"}, + {file = "hf_xet-1.3.2-cp37-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:7c2a054a97c44e136b1f7f5a78f12b3efffdf2eed3abc6746fc5ea4b39511633"}, + {file = 
"hf_xet-1.3.2-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:06b724a361f670ae557836e57801b82c75b534812e351a87a2c739f77d1e0635"}, + {file = "hf_xet-1.3.2-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:305f5489d7241a47e0458ef49334be02411d1d0f480846363c1c8084ed9916f7"}, + {file = "hf_xet-1.3.2-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:06cdbde243c85f39a63b28e9034321399c507bcd5e7befdd17ed2ccc06dfe14e"}, + {file = "hf_xet-1.3.2-cp37-abi3-win_amd64.whl", hash = "sha256:9298b47cce6037b7045ae41482e703c471ce36b52e73e49f71226d2e8e5685a1"}, + {file = "hf_xet-1.3.2-cp37-abi3-win_arm64.whl", hash = "sha256:83d8ec273136171431833a6957e8f3af496bee227a0fe47c7b8b39c106d1749a"}, + {file = "hf_xet-1.3.2.tar.gz", hash = "sha256:e130ee08984783d12717444e538587fa2119385e5bd8fc2bb9f930419b73a7af"}, +] + +[package.extras] +tests = ["pytest"] + +[[package]] +name = "httpcore" +version = "1.0.9" +description = "A minimal low-level HTTP client." +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55"}, + {file = "httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8"}, +] + +[package.dependencies] +certifi = "*" +h11 = ">=0.16" + +[package.extras] +asyncio = ["anyio (>=4.0,<5.0)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] +trio = ["trio (>=0.22.0,<1.0)"] + +[[package]] +name = "httpx" +version = "0.27.2" +description = "The next generation HTTP client." 
+optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "httpx-0.27.2-py3-none-any.whl", hash = "sha256:7bb2708e112d8fdd7829cd4243970f0c223274051cb35ee80c03301ee29a3df0"}, + {file = "httpx-0.27.2.tar.gz", hash = "sha256:f7c2be1d2f3c3c3160d441802406b206c2b76f5947b11115e6df10c6c65e66c2"}, +] + +[package.dependencies] +anyio = "*" +certifi = "*" +httpcore = "==1.*" +idna = "*" +sniffio = "*" + +[package.extras] +brotli = ["brotli ; platform_python_implementation == \"CPython\"", "brotlicffi ; platform_python_implementation != \"CPython\""] +cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "huggingface-hub" +version = "1.5.0" +description = "Client library to download and publish models, datasets and other repos on the huggingface.co hub" +optional = false +python-versions = ">=3.9.0" +groups = ["main"] +files = [ + {file = "huggingface_hub-1.5.0-py3-none-any.whl", hash = "sha256:c9c0b3ab95a777fc91666111f3b3ede71c0cdced3614c553a64e98920585c4ee"}, + {file = "huggingface_hub-1.5.0.tar.gz", hash = "sha256:f281838db29265880fb543de7a23b0f81d3504675de82044307ea3c6c62f799d"}, +] + +[package.dependencies] +filelock = ">=3.10.0" +fsspec = ">=2023.5.0" +hf-xet = {version = ">=1.2.0,<2.0.0", markers = "platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"arm64\" or platform_machine == \"aarch64\""} +httpx = ">=0.23.0,<1" +packaging = ">=20.9" +pyyaml = ">=5.1" +tqdm = ">=4.42.1" +typer = "*" +typing-extensions = ">=4.1.0" + +[package.extras] +all = ["Jinja2", "Pillow", "authlib (>=1.3.2)", "fastapi", "fastapi", "httpx", "itsdangerous", "jedi", "libcst (>=1.4.0)", "mypy (==1.15.0)", "numpy", "pytest (>=8.4.2)", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-mock", "pytest-rerunfailures (<16.0)", "pytest-vcr", "pytest-xdist", "ruff (>=0.9.0)", "soundfile", 
"ty", "types-PyYAML", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)", "urllib3 (<2.0)"] +dev = ["Jinja2", "Pillow", "authlib (>=1.3.2)", "fastapi", "fastapi", "httpx", "itsdangerous", "jedi", "libcst (>=1.4.0)", "mypy (==1.15.0)", "numpy", "pytest (>=8.4.2)", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-mock", "pytest-rerunfailures (<16.0)", "pytest-vcr", "pytest-xdist", "ruff (>=0.9.0)", "soundfile", "ty", "types-PyYAML", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)", "urllib3 (<2.0)"] +fastai = ["fastai (>=2.4)", "fastcore (>=1.3.27)", "toml"] +hf-xet = ["hf-xet (>=1.2.0,<2.0.0)"] +mcp = ["mcp (>=1.8.0)"] +oauth = ["authlib (>=1.3.2)", "fastapi", "httpx", "itsdangerous"] +quality = ["libcst (>=1.4.0)", "mypy (==1.15.0)", "ruff (>=0.9.0)", "ty"] +testing = ["Jinja2", "Pillow", "authlib (>=1.3.2)", "fastapi", "fastapi", "httpx", "itsdangerous", "jedi", "numpy", "pytest (>=8.4.2)", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-mock", "pytest-rerunfailures (<16.0)", "pytest-vcr", "pytest-xdist", "soundfile", "urllib3 (<2.0)"] +torch = ["safetensors[torch]", "torch"] +typing = ["types-PyYAML", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)"] + +[[package]] +name = "idna" +version = "3.11" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea"}, + {file = "idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902"}, +] + +[package.extras] +all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] + +[[package]] +name = "jiter" +version = "0.13.0" +description = "Fast iterable JSON parser." 
+optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "jiter-0.13.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2ffc63785fd6c7977defe49b9824ae6ce2b2e2b77ce539bdaf006c26da06342e"}, + {file = "jiter-0.13.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4a638816427006c1e3f0013eb66d391d7a3acda99a7b0cf091eff4497ccea33a"}, + {file = "jiter-0.13.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:19928b5d1ce0ff8c1ee1b9bdef3b5bfc19e8304f1b904e436caf30bc15dc6cf5"}, + {file = "jiter-0.13.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:309549b778b949d731a2f0e1594a3f805716be704a73bf3ad9a807eed5eb5721"}, + {file = "jiter-0.13.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bcdabaea26cb04e25df3103ce47f97466627999260290349a88c8136ecae0060"}, + {file = "jiter-0.13.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a3a377af27b236abbf665a69b2bdd680e3b5a0bd2af825cd3b81245279a7606c"}, + {file = "jiter-0.13.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe49d3ff6db74321f144dff9addd4a5874d3105ac5ba7c5b77fac099cfae31ae"}, + {file = "jiter-0.13.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2113c17c9a67071b0f820733c0893ed1d467b5fcf4414068169e5c2cabddb1e2"}, + {file = "jiter-0.13.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ab1185ca5c8b9491b55ebf6c1e8866b8f68258612899693e24a92c5fdb9455d5"}, + {file = "jiter-0.13.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9621ca242547edc16400981ca3231e0c91c0c4c1ab8573a596cd9bb3575d5c2b"}, + {file = "jiter-0.13.0-cp310-cp310-win32.whl", hash = "sha256:a7637d92b1c9d7a771e8c56f445c7f84396d48f2e756e5978840ecba2fac0894"}, + {file = "jiter-0.13.0-cp310-cp310-win_amd64.whl", hash = "sha256:c1b609e5cbd2f52bb74fb721515745b407df26d7b800458bd97cb3b972c29e7d"}, + {file = "jiter-0.13.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = 
"sha256:ea026e70a9a28ebbdddcbcf0f1323128a8db66898a06eaad3a4e62d2f554d096"}, + {file = "jiter-0.13.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:66aa3e663840152d18cc8ff1e4faad3dd181373491b9cfdc6004b92198d67911"}, + {file = "jiter-0.13.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c3524798e70655ff19aec58c7d05adb1f074fecff62da857ea9be2b908b6d701"}, + {file = "jiter-0.13.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ec7e287d7fbd02cb6e22f9a00dd9c9cd504c40a61f2c61e7e1f9690a82726b4c"}, + {file = "jiter-0.13.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:47455245307e4debf2ce6c6e65a717550a0244231240dcf3b8f7d64e4c2f22f4"}, + {file = "jiter-0.13.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ee9da221dca6e0429c2704c1b3655fe7b025204a71d4d9b73390c759d776d165"}, + {file = "jiter-0.13.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24ab43126d5e05f3d53a36a8e11eb2f23304c6c1117844aaaf9a0aa5e40b5018"}, + {file = "jiter-0.13.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9da38b4fedde4fb528c740c2564628fbab737166a0e73d6d46cb4bb5463ff411"}, + {file = "jiter-0.13.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0b34c519e17658ed88d5047999a93547f8889f3c1824120c26ad6be5f27b6cf5"}, + {file = "jiter-0.13.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d2a6394e6af690d462310a86b53c47ad75ac8c21dc79f120714ea449979cb1d3"}, + {file = "jiter-0.13.0-cp311-cp311-win32.whl", hash = "sha256:0f0c065695f616a27c920a56ad0d4fc46415ef8b806bf8fc1cacf25002bd24e1"}, + {file = "jiter-0.13.0-cp311-cp311-win_amd64.whl", hash = "sha256:0733312953b909688ae3c2d58d043aa040f9f1a6a75693defed7bc2cc4bf2654"}, + {file = "jiter-0.13.0-cp311-cp311-win_arm64.whl", hash = "sha256:5d9b34ad56761b3bf0fbe8f7e55468704107608512350962d3317ffd7a4382d5"}, + {file = "jiter-0.13.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = 
"sha256:0a2bd69fc1d902e89925fc34d1da51b2128019423d7b339a45d9e99c894e0663"}, + {file = "jiter-0.13.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f917a04240ef31898182f76a332f508f2cc4b57d2b4d7ad2dbfebbfe167eb505"}, + {file = "jiter-0.13.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1e2b199f446d3e82246b4fd9236d7cb502dc2222b18698ba0d986d2fecc6152"}, + {file = "jiter-0.13.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:04670992b576fa65bd056dbac0c39fe8bd67681c380cb2b48efa885711d9d726"}, + {file = "jiter-0.13.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5a1aff1fbdb803a376d4d22a8f63f8e7ccbce0b4890c26cc7af9e501ab339ef0"}, + {file = "jiter-0.13.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b3fb8c2053acaef8580809ac1d1f7481a0a0bdc012fd7f5d8b18fb696a5a089"}, + {file = "jiter-0.13.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bdaba7d87e66f26a2c45d8cbadcbfc4bf7884182317907baf39cfe9775bb4d93"}, + {file = "jiter-0.13.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7b88d649135aca526da172e48083da915ec086b54e8e73a425ba50999468cc08"}, + {file = "jiter-0.13.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e404ea551d35438013c64b4f357b0474c7abf9f781c06d44fcaf7a14c69ff9e2"}, + {file = "jiter-0.13.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1f4748aad1b4a93c8bdd70f604d0f748cdc0e8744c5547798acfa52f10e79228"}, + {file = "jiter-0.13.0-cp312-cp312-win32.whl", hash = "sha256:0bf670e3b1445fc4d31612199f1744f67f889ee1bbae703c4b54dc097e5dd394"}, + {file = "jiter-0.13.0-cp312-cp312-win_amd64.whl", hash = "sha256:15db60e121e11fe186c0b15236bd5d18381b9ddacdcf4e659feb96fc6c969c92"}, + {file = "jiter-0.13.0-cp312-cp312-win_arm64.whl", hash = "sha256:41f92313d17989102f3cb5dd533a02787cdb99454d494344b0361355da52fcb9"}, + {file = "jiter-0.13.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = 
"sha256:1f8a55b848cbabf97d861495cd65f1e5c590246fabca8b48e1747c4dfc8f85bf"}, + {file = "jiter-0.13.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f556aa591c00f2c45eb1b89f68f52441a016034d18b65da60e2d2875bbbf344a"}, + {file = "jiter-0.13.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f7e1d61da332ec412350463891923f960c3073cf1aae93b538f0bb4c8cd46efb"}, + {file = "jiter-0.13.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3097d665a27bc96fd9bbf7f86178037db139f319f785e4757ce7ccbf390db6c2"}, + {file = "jiter-0.13.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9d01ecc3a8cbdb6f25a37bd500510550b64ddf9f7d64a107d92f3ccb25035d0f"}, + {file = "jiter-0.13.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ed9bbc30f5d60a3bdf63ae76beb3f9db280d7f195dfcfa61af792d6ce912d159"}, + {file = "jiter-0.13.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98fbafb6e88256f4454de33c1f40203d09fc33ed19162a68b3b257b29ca7f663"}, + {file = "jiter-0.13.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5467696f6b827f1116556cb0db620440380434591e93ecee7fd14d1a491b6daa"}, + {file = "jiter-0.13.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:2d08c9475d48b92892583df9da592a0e2ac49bcd41fae1fec4f39ba6cf107820"}, + {file = "jiter-0.13.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:aed40e099404721d7fcaf5b89bd3b4568a4666358bcac7b6b15c09fb6252ab68"}, + {file = "jiter-0.13.0-cp313-cp313-win32.whl", hash = "sha256:36ebfbcffafb146d0e6ffb3e74d51e03d9c35ce7c625c8066cdbfc7b953bdc72"}, + {file = "jiter-0.13.0-cp313-cp313-win_amd64.whl", hash = "sha256:8d76029f077379374cf0dbc78dbe45b38dec4a2eb78b08b5194ce836b2517afc"}, + {file = "jiter-0.13.0-cp313-cp313-win_arm64.whl", hash = "sha256:bb7613e1a427cfcb6ea4544f9ac566b93d5bf67e0d48c787eca673ff9c9dff2b"}, + {file = "jiter-0.13.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = 
"sha256:fa476ab5dd49f3bf3a168e05f89358c75a17608dbabb080ef65f96b27c19ab10"}, + {file = "jiter-0.13.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ade8cb6ff5632a62b7dbd4757d8c5573f7a2e9ae285d6b5b841707d8363205ef"}, + {file = "jiter-0.13.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9950290340acc1adaded363edd94baebcee7dabdfa8bee4790794cd5cfad2af6"}, + {file = "jiter-0.13.0-cp313-cp313t-win_amd64.whl", hash = "sha256:2b4972c6df33731aac0742b64fd0d18e0a69bc7d6e03108ce7d40c85fd9e3e6d"}, + {file = "jiter-0.13.0-cp313-cp313t-win_arm64.whl", hash = "sha256:701a1e77d1e593c1b435315ff625fd071f0998c5f02792038a5ca98899261b7d"}, + {file = "jiter-0.13.0-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:cc5223ab19fe25e2f0bf2643204ad7318896fe3729bf12fde41b77bfc4fafff0"}, + {file = "jiter-0.13.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:9776ebe51713acf438fd9b4405fcd86893ae5d03487546dae7f34993217f8a91"}, + {file = "jiter-0.13.0-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:879e768938e7b49b5e90b7e3fecc0dbec01b8cb89595861fb39a8967c5220d09"}, + {file = "jiter-0.13.0-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:682161a67adea11e3aae9038c06c8b4a9a71023228767477d683f69903ebc607"}, + {file = "jiter-0.13.0-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a13b68cd1cd8cc9de8f244ebae18ccb3e4067ad205220ef324c39181e23bbf66"}, + {file = "jiter-0.13.0-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:87ce0f14c6c08892b610686ae8be350bf368467b6acd5085a5b65441e2bf36d2"}, + {file = "jiter-0.13.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c365005b05505a90d1c47856420980d0237adf82f70c4aff7aebd3c1cc143ad"}, + {file = "jiter-0.13.0-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1317fdffd16f5873e46ce27d0e0f7f4f90f0cdf1d86bf6abeaea9f63ca2c401d"}, + {file = 
"jiter-0.13.0-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:c05b450d37ba0c9e21c77fef1f205f56bcee2330bddca68d344baebfc55ae0df"}, + {file = "jiter-0.13.0-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:775e10de3849d0631a97c603f996f518159272db00fdda0a780f81752255ee9d"}, + {file = "jiter-0.13.0-cp314-cp314-win32.whl", hash = "sha256:632bf7c1d28421c00dd8bbb8a3bac5663e1f57d5cd5ed962bce3c73bf62608e6"}, + {file = "jiter-0.13.0-cp314-cp314-win_amd64.whl", hash = "sha256:f22ef501c3f87ede88f23f9b11e608581c14f04db59b6a801f354397ae13739f"}, + {file = "jiter-0.13.0-cp314-cp314-win_arm64.whl", hash = "sha256:07b75fe09a4ee8e0c606200622e571e44943f47254f95e2436c8bdcaceb36d7d"}, + {file = "jiter-0.13.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:964538479359059a35fb400e769295d4b315ae61e4105396d355a12f7fef09f0"}, + {file = "jiter-0.13.0-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e104da1db1c0991b3eaed391ccd650ae8d947eab1480c733e5a3fb28d4313e40"}, + {file = "jiter-0.13.0-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0e3a5f0cde8ff433b8e88e41aa40131455420fb3649a3c7abdda6145f8cb7202"}, + {file = "jiter-0.13.0-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:57aab48f40be1db920a582b30b116fe2435d184f77f0e4226f546794cedd9cf0"}, + {file = "jiter-0.13.0-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7772115877c53f62beeb8fd853cab692dbc04374ef623b30f997959a4c0e7e95"}, + {file = "jiter-0.13.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1211427574b17b633cfceba5040de8081e5abf114f7a7602f73d2e16f9fdaa59"}, + {file = "jiter-0.13.0-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7beae3a3d3b5212d3a55d2961db3c292e02e302feb43fce6a3f7a31b90ea6dfe"}, + {file = "jiter-0.13.0-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:e5562a0f0e90a6223b704163ea28e831bd3a9faa3512a711f031611e6b06c939"}, + {file = 
"jiter-0.13.0-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:6c26a424569a59140fb51160a56df13f438a2b0967365e987889186d5fc2f6f9"}, + {file = "jiter-0.13.0-cp314-cp314t-win32.whl", hash = "sha256:24dc96eca9f84da4131cdf87a95e6ce36765c3b156fc9ae33280873b1c32d5f6"}, + {file = "jiter-0.13.0-cp314-cp314t-win_amd64.whl", hash = "sha256:0a8d76c7524087272c8ae913f5d9d608bd839154b62c4322ef65723d2e5bb0b8"}, + {file = "jiter-0.13.0-cp314-cp314t-win_arm64.whl", hash = "sha256:2c26cf47e2cad140fa23b6d58d435a7c0161f5c514284802f25e87fddfe11024"}, + {file = "jiter-0.13.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:4397ee562b9f69d283e5674445551b47a5e8076fdde75e71bfac5891113dc543"}, + {file = "jiter-0.13.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7f90023f8f672e13ea1819507d2d21b9d2d1c18920a3b3a5f1541955a85b5504"}, + {file = "jiter-0.13.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed0240dd1536a98c3ab55e929c60dfff7c899fecafcb7d01161b21a99fc8c363"}, + {file = "jiter-0.13.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6207fc61c395b26fffdcf637a0b06b4326f35bfa93c6e92fe1a166a21aeb6731"}, + {file = "jiter-0.13.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:00203f47c214156df427b5989de74cb340c65c8180d09be1bf9de81d0abad599"}, + {file = "jiter-0.13.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c26ad6967c9dcedf10c995a21539c3aa57d4abad7001b7a84f621a263a6b605"}, + {file = "jiter-0.13.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a576f5dce9ac7de5d350b8e2f552cf364f32975ed84717c35379a51c7cb198bd"}, + {file = "jiter-0.13.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b22945be8425d161f2e536cdae66da300b6b000f1c0ba3ddf237d1bfd45d21b8"}, + {file = "jiter-0.13.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:6eeb7db8bc77dc20476bc2f7407a23dbe3d46d9cc664b166e3d474e1c1de4baa"}, + {file = 
"jiter-0.13.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:19cd6f85e1dc090277c3ce90a5b7d96f32127681d825e71c9dce28788e39fc0c"}, + {file = "jiter-0.13.0-cp39-cp39-win32.whl", hash = "sha256:dc3ce84cfd4fa9628fe62c4f85d0d597a4627d4242cfafac32a12cc1455d00f7"}, + {file = "jiter-0.13.0-cp39-cp39-win_amd64.whl", hash = "sha256:9ffda299e417dc83362963966c50cb76d42da673ee140de8a8ac762d4bb2378b"}, + {file = "jiter-0.13.0-graalpy311-graalpy242_311_native-macosx_10_12_x86_64.whl", hash = "sha256:b1cbfa133241d0e6bdab48dcdc2604e8ba81512f6bbd68ec3e8e1357dd3c316c"}, + {file = "jiter-0.13.0-graalpy311-graalpy242_311_native-macosx_11_0_arm64.whl", hash = "sha256:db367d8be9fad6e8ebbac4a7578b7af562e506211036cba2c06c3b998603c3d2"}, + {file = "jiter-0.13.0-graalpy311-graalpy242_311_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45f6f8efb2f3b0603092401dc2df79fa89ccbc027aaba4174d2d4133ed661434"}, + {file = "jiter-0.13.0-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:597245258e6ad085d064780abfb23a284d418d3e61c57362d9449c6c7317ee2d"}, + {file = "jiter-0.13.0-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:3d744a6061afba08dd7ae375dcde870cffb14429b7477e10f67e9e6d68772a0a"}, + {file = "jiter-0.13.0-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:ff732bd0a0e778f43d5009840f20b935e79087b4dc65bd36f1cd0f9b04b8ff7f"}, + {file = "jiter-0.13.0-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ab44b178f7981fcaea7e0a5df20e773c663d06ffda0198f1a524e91b2fde7e59"}, + {file = "jiter-0.13.0-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7bb00b6d26db67a05fe3e12c76edc75f32077fb51deed13822dc648fa373bc19"}, + {file = "jiter-0.13.0.tar.gz", hash = "sha256:f2839f9c2c7e2dffc1bc5929a510e14ce0a946be9365fd1219e7ef342dae14f4"}, +] + +[[package]] +name = "markdown-it-py" +version = "4.0.0" 
+description = "Python port of markdown-it. Markdown parsing, done right!" +optional = false +python-versions = ">=3.10" +groups = ["main"] +files = [ + {file = "markdown_it_py-4.0.0-py3-none-any.whl", hash = "sha256:87327c59b172c5011896038353a81343b6754500a08cd7a4973bb48c6d578147"}, + {file = "markdown_it_py-4.0.0.tar.gz", hash = "sha256:cb0a2b4aa34f932c007117b194e945bd74e0ec24133ceb5bac59009cda1cb9f3"}, +] + +[package.dependencies] +mdurl = ">=0.1,<1.0" + +[package.extras] +benchmarking = ["psutil", "pytest", "pytest-benchmark"] +compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "markdown-it-pyrs", "mistletoe (>=1.0,<2.0)", "mistune (>=3.0,<4.0)", "panflute (>=2.3,<3.0)"] +linkify = ["linkify-it-py (>=1,<3)"] +plugins = ["mdit-py-plugins (>=0.5.0)"] +profiling = ["gprof2dot"] +rtd = ["ipykernel", "jupyter_sphinx", "mdit-py-plugins (>=0.5.0)", "myst-parser", "pyyaml", "sphinx", "sphinx-book-theme (>=1.0,<2.0)", "sphinx-copybutton", "sphinx-design"] +testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions", "requests"] + +[[package]] +name = "mdurl" +version = "0.1.2" +description = "Markdown URL utilities" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, + {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, +] + +[[package]] +name = "multidict" +version = "6.7.1" +description = "multidict implementation" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "multidict-6.7.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:c93c3db7ea657dd4637d57e74ab73de31bccefe144d3d4ce370052035bc85fb5"}, + {file = "multidict-6.7.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:974e72a2474600827abaeda71af0c53d9ebbc3c2eb7da37b37d7829ae31232d8"}, + {file = "multidict-6.7.1-cp310-cp310-macosx_11_0_arm64.whl", 
hash = "sha256:cdea2e7b2456cfb6694fb113066fd0ec7ea4d67e3a35e1f4cbeea0b448bf5872"}, + {file = "multidict-6.7.1-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:17207077e29342fdc2c9a82e4b306f1127bf1ea91f8b71e02d4798a70bb99991"}, + {file = "multidict-6.7.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d4f49cb5661344764e4c7c7973e92a47a59b8fc19b6523649ec9dc4960e58a03"}, + {file = "multidict-6.7.1-cp310-cp310-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:a9fc4caa29e2e6ae408d1c450ac8bf19892c5fca83ee634ecd88a53332c59981"}, + {file = "multidict-6.7.1-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c5f0c21549ab432b57dcc82130f388d84ad8179824cc3f223d5e7cfbfd4143f6"}, + {file = "multidict-6.7.1-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:7dfb78d966b2c906ae1d28ccf6e6712a3cd04407ee5088cd276fe8cb42186190"}, + {file = "multidict-6.7.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9b0d9b91d1aa44db9c1f1ecd0d9d2ae610b2f4f856448664e01a3b35899f3f92"}, + {file = "multidict-6.7.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:dd96c01a9dcd4889dcfcf9eb5544ca0c77603f239e3ffab0524ec17aea9a93ee"}, + {file = "multidict-6.7.1-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:067343c68cd6612d375710f895337b3a98a033c94f14b9a99eff902f205424e2"}, + {file = "multidict-6.7.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:5884a04f4ff56c6120f6ccf703bdeb8b5079d808ba604d4d53aec0d55dc33568"}, + {file = "multidict-6.7.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:8affcf1c98b82bc901702eb73b6947a1bfa170823c153fe8a47b5f5f02e48e40"}, + {file = "multidict-6.7.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:0d17522c37d03e85c8098ec8431636309b2682cf12e58f4dbc76121fb50e4962"}, + {file = 
"multidict-6.7.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:24c0cf81544ca5e17cfcb6e482e7a82cd475925242b308b890c9452a074d4505"}, + {file = "multidict-6.7.1-cp310-cp310-win32.whl", hash = "sha256:d82dd730a95e6643802f4454b8fdecdf08667881a9c5670db85bc5a56693f122"}, + {file = "multidict-6.7.1-cp310-cp310-win_amd64.whl", hash = "sha256:cf37cbe5ced48d417ba045aca1b21bafca67489452debcde94778a576666a1df"}, + {file = "multidict-6.7.1-cp310-cp310-win_arm64.whl", hash = "sha256:59bc83d3f66b41dac1e7460aac1d196edc70c9ba3094965c467715a70ecb46db"}, + {file = "multidict-6.7.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:7ff981b266af91d7b4b3793ca3382e53229088d193a85dfad6f5f4c27fc73e5d"}, + {file = "multidict-6.7.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:844c5bca0b5444adb44a623fb0a1310c2f4cd41f402126bb269cd44c9b3f3e1e"}, + {file = "multidict-6.7.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f2a0a924d4c2e9afcd7ec64f9de35fcd96915149b2216e1cb2c10a56df483855"}, + {file = "multidict-6.7.1-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:8be1802715a8e892c784c0197c2ace276ea52702a0ede98b6310c8f255a5afb3"}, + {file = "multidict-6.7.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2e2d2ed645ea29f31c4c7ea1552fcfd7cb7ba656e1eafd4134a6620c9f5fdd9e"}, + {file = "multidict-6.7.1-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:95922cee9a778659e91db6497596435777bd25ed116701a4c034f8e46544955a"}, + {file = "multidict-6.7.1-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:6b83cabdc375ffaaa15edd97eb7c0c672ad788e2687004990074d7d6c9b140c8"}, + {file = "multidict-6.7.1-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:38fb49540705369bab8484db0689d86c0a33a0a9f2c1b197f506b71b4b6c19b0"}, + {file = 
"multidict-6.7.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:439cbebd499f92e9aa6793016a8acaa161dfa749ae86d20960189f5398a19144"}, + {file = "multidict-6.7.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6d3bc717b6fe763b8be3f2bee2701d3c8eb1b2a8ae9f60910f1b2860c82b6c49"}, + {file = "multidict-6.7.1-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:619e5a1ac57986dbfec9f0b301d865dddf763696435e2962f6d9cf2fdff2bb71"}, + {file = "multidict-6.7.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:0b38ebffd9be37c1170d33bc0f36f4f262e0a09bc1aac1c34c7aa51a7293f0b3"}, + {file = "multidict-6.7.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:10ae39c9cfe6adedcdb764f5e8411d4a92b055e35573a2eaa88d3323289ef93c"}, + {file = "multidict-6.7.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:25167cc263257660290fba06b9318d2026e3c910be240a146e1f66dd114af2b0"}, + {file = "multidict-6.7.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:128441d052254f42989ef98b7b6a6ecb1e6f708aa962c7984235316db59f50fa"}, + {file = "multidict-6.7.1-cp311-cp311-win32.whl", hash = "sha256:d62b7f64ffde3b99d06b707a280db04fb3855b55f5a06df387236051d0668f4a"}, + {file = "multidict-6.7.1-cp311-cp311-win_amd64.whl", hash = "sha256:bdbf9f3b332abd0cdb306e7c2113818ab1e922dc84b8f8fd06ec89ed2a19ab8b"}, + {file = "multidict-6.7.1-cp311-cp311-win_arm64.whl", hash = "sha256:b8c990b037d2fff2f4e33d3f21b9b531c5745b33a49a7d6dbe7a177266af44f6"}, + {file = "multidict-6.7.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:a90f75c956e32891a4eda3639ce6dd86e87105271f43d43442a3aedf3cddf172"}, + {file = "multidict-6.7.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:3fccb473e87eaa1382689053e4a4618e7ba7b9b9b8d6adf2027ee474597128cd"}, + {file = "multidict-6.7.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b0fa96985700739c4c7853a43c0b3e169360d6855780021bfc6d0f1ce7c123e7"}, + {file = 
"multidict-6.7.1-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:cb2a55f408c3043e42b40cc8eecd575afa27b7e0b956dfb190de0f8499a57a53"}, + {file = "multidict-6.7.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:eb0ce7b2a32d09892b3dd6cc44877a0d02a33241fafca5f25c8b6b62374f8b75"}, + {file = "multidict-6.7.1-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:c3a32d23520ee37bf327d1e1a656fec76a2edd5c038bf43eddfa0572ec49c60b"}, + {file = "multidict-6.7.1-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:9c90fed18bffc0189ba814749fdcc102b536e83a9f738a9003e569acd540a733"}, + {file = "multidict-6.7.1-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:da62917e6076f512daccfbbde27f46fed1c98fee202f0559adec8ee0de67f71a"}, + {file = "multidict-6.7.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bfde23ef6ed9db7eaee6c37dcec08524cb43903c60b285b172b6c094711b3961"}, + {file = "multidict-6.7.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3758692429e4e32f1ba0df23219cd0b4fc0a52f476726fff9337d1a57676a582"}, + {file = "multidict-6.7.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:398c1478926eca669f2fd6a5856b6de9c0acf23a2cb59a14c0ba5844fa38077e"}, + {file = "multidict-6.7.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:c102791b1c4f3ab36ce4101154549105a53dc828f016356b3e3bcae2e3a039d3"}, + {file = "multidict-6.7.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:a088b62bd733e2ad12c50dad01b7d0166c30287c166e137433d3b410add807a6"}, + {file = "multidict-6.7.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:3d51ff4785d58d3f6c91bdbffcb5e1f7ddfda557727043aa20d20ec4f65e324a"}, + {file = "multidict-6.7.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = 
"sha256:fc5907494fccf3e7d3f94f95c91d6336b092b5fc83811720fae5e2765890dfba"}, + {file = "multidict-6.7.1-cp312-cp312-win32.whl", hash = "sha256:28ca5ce2fd9716631133d0e9a9b9a745ad7f60bac2bccafb56aa380fc0b6c511"}, + {file = "multidict-6.7.1-cp312-cp312-win_amd64.whl", hash = "sha256:fcee94dfbd638784645b066074b338bc9cc155d4b4bffa4adce1615c5a426c19"}, + {file = "multidict-6.7.1-cp312-cp312-win_arm64.whl", hash = "sha256:ba0a9fb644d0c1a2194cf7ffb043bd852cea63a57f66fbd33959f7dae18517bf"}, + {file = "multidict-6.7.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:2b41f5fed0ed563624f1c17630cb9941cf2309d4df00e494b551b5f3e3d67a23"}, + {file = "multidict-6.7.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:84e61e3af5463c19b67ced91f6c634effb89ef8bfc5ca0267f954451ed4bb6a2"}, + {file = "multidict-6.7.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:935434b9853c7c112eee7ac891bc4cb86455aa631269ae35442cb316790c1445"}, + {file = "multidict-6.7.1-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:432feb25a1cb67fe82a9680b4d65fb542e4635cb3166cd9c01560651ad60f177"}, + {file = "multidict-6.7.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e82d14e3c948952a1a85503817e038cba5905a3352de76b9a465075d072fba23"}, + {file = "multidict-6.7.1-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:4cfb48c6ea66c83bcaaf7e4dfa7ec1b6bbcf751b7db85a328902796dfde4c060"}, + {file = "multidict-6.7.1-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:1d540e51b7e8e170174555edecddbd5538105443754539193e3e1061864d444d"}, + {file = "multidict-6.7.1-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:273d23f4b40f3dce4d6c8a821c741a86dec62cded82e1175ba3d99be128147ed"}, + {file = "multidict-6.7.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", 
hash = "sha256:9d624335fd4fa1c08a53f8b4be7676ebde19cd092b3895c421045ca87895b429"}, + {file = "multidict-6.7.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:12fad252f8b267cc75b66e8fc51b3079604e8d43a75428ffe193cd9e2195dfd6"}, + {file = "multidict-6.7.1-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:03ede2a6ffbe8ef936b92cb4529f27f42be7f56afcdab5ab739cd5f27fb1cbf9"}, + {file = "multidict-6.7.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:90efbcf47dbe33dcf643a1e400d67d59abeac5db07dc3f27d6bdeae497a2198c"}, + {file = "multidict-6.7.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:5c4b9bfc148f5a91be9244d6264c53035c8a0dcd2f51f1c3c6e30e30ebaa1c84"}, + {file = "multidict-6.7.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:401c5a650f3add2472d1d288c26deebc540f99e2fb83e9525007a74cd2116f1d"}, + {file = "multidict-6.7.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:97891f3b1b3ffbded884e2916cacf3c6fc87b66bb0dde46f7357404750559f33"}, + {file = "multidict-6.7.1-cp313-cp313-win32.whl", hash = "sha256:e1c5988359516095535c4301af38d8a8838534158f649c05dd1050222321bcb3"}, + {file = "multidict-6.7.1-cp313-cp313-win_amd64.whl", hash = "sha256:960c83bf01a95b12b08fd54324a4eb1d5b52c88932b5cba5d6e712bb3ed12eb5"}, + {file = "multidict-6.7.1-cp313-cp313-win_arm64.whl", hash = "sha256:563fe25c678aaba333d5399408f5ec3c383ca5b663e7f774dd179a520b8144df"}, + {file = "multidict-6.7.1-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:c76c4bec1538375dad9d452d246ca5368ad6e1c9039dadcf007ae59c70619ea1"}, + {file = "multidict-6.7.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:57b46b24b5d5ebcc978da4ec23a819a9402b4228b8a90d9c656422b4bdd8a963"}, + {file = "multidict-6.7.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:e954b24433c768ce78ab7929e84ccf3422e46deb45a4dc9f93438f8217fa2d34"}, + {file = "multidict-6.7.1-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = 
"sha256:3bd231490fa7217cc832528e1cd8752a96f0125ddd2b5749390f7c3ec8721b65"}, + {file = "multidict-6.7.1-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:253282d70d67885a15c8a7716f3a73edf2d635793ceda8173b9ecc21f2fb8292"}, + {file = "multidict-6.7.1-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:0b4c48648d7649c9335cf1927a8b87fa692de3dcb15faa676c6a6f1f1aabda43"}, + {file = "multidict-6.7.1-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:98bc624954ec4d2c7cb074b8eefc2b5d0ce7d482e410df446414355d158fe4ca"}, + {file = "multidict-6.7.1-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:1b99af4d9eec0b49927b4402bcbb58dea89d3e0db8806a4086117019939ad3dd"}, + {file = "multidict-6.7.1-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6aac4f16b472d5b7dc6f66a0d49dd57b0e0902090be16594dc9ebfd3d17c47e7"}, + {file = "multidict-6.7.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:21f830fe223215dffd51f538e78c172ed7c7f60c9b96a2bf05c4848ad49921c3"}, + {file = "multidict-6.7.1-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:f5dd81c45b05518b9aa4da4aa74e1c93d715efa234fd3e8a179df611cc85e5f4"}, + {file = "multidict-6.7.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:eb304767bca2bb92fb9c5bd33cedc95baee5bb5f6c88e63706533a1c06ad08c8"}, + {file = "multidict-6.7.1-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:c9035dde0f916702850ef66460bc4239d89d08df4d02023a5926e7446724212c"}, + {file = "multidict-6.7.1-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:af959b9beeb66c822380f222f0e0a1889331597e81f1ded7f374f3ecb0fd6c52"}, + {file = "multidict-6.7.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:41f2952231456154ee479651491e94118229844dd7226541788be783be2b5108"}, + {file = "multidict-6.7.1-cp313-cp313t-win32.whl", 
hash = "sha256:df9f19c28adcb40b6aae30bbaa1478c389efd50c28d541d76760199fc1037c32"}, + {file = "multidict-6.7.1-cp313-cp313t-win_amd64.whl", hash = "sha256:d54ecf9f301853f2c5e802da559604b3e95bb7a3b01a9c295c6ee591b9882de8"}, + {file = "multidict-6.7.1-cp313-cp313t-win_arm64.whl", hash = "sha256:5a37ca18e360377cfda1d62f5f382ff41f2b8c4ccb329ed974cc2e1643440118"}, + {file = "multidict-6.7.1-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:8f333ec9c5eb1b7105e3b84b53141e66ca05a19a605368c55450b6ba208cb9ee"}, + {file = "multidict-6.7.1-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:a407f13c188f804c759fc6a9f88286a565c242a76b27626594c133b82883b5c2"}, + {file = "multidict-6.7.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:0e161ddf326db5577c3a4cc2d8648f81456e8a20d40415541587a71620d7a7d1"}, + {file = "multidict-6.7.1-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:1e3a8bb24342a8201d178c3b4984c26ba81a577c80d4d525727427460a50c22d"}, + {file = "multidict-6.7.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:97231140a50f5d447d3164f994b86a0bed7cd016e2682f8650d6a9158e14fd31"}, + {file = "multidict-6.7.1-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:6b10359683bd8806a200fd2909e7c8ca3a7b24ec1d8132e483d58e791d881048"}, + {file = "multidict-6.7.1-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:283ddac99f7ac25a4acadbf004cb5ae34480bbeb063520f70ce397b281859362"}, + {file = "multidict-6.7.1-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:538cec1e18c067d0e6103aa9a74f9e832904c957adc260e61cd9d8cf0c3b3d37"}, + {file = "multidict-6.7.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7eee46ccb30ff48a1e35bb818cc90846c6be2b68240e42a78599166722cea709"}, + {file = 
"multidict-6.7.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:fa263a02f4f2dd2d11a7b1bb4362aa7cb1049f84a9235d31adf63f30143469a0"}, + {file = "multidict-6.7.1-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:2e1425e2f99ec5bd36c15a01b690a1a2456209c5deed58f95469ffb46039ccbb"}, + {file = "multidict-6.7.1-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:497394b3239fc6f0e13a78a3e1b61296e72bf1c5f94b4c4eb80b265c37a131cd"}, + {file = "multidict-6.7.1-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:233b398c29d3f1b9676b4b6f75c518a06fcb2ea0b925119fb2c1bc35c05e1601"}, + {file = "multidict-6.7.1-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:93b1818e4a6e0930454f0f2af7dfce69307ca03cdcfb3739bf4d91241967b6c1"}, + {file = "multidict-6.7.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:f33dc2a3abe9249ea5d8360f969ec7f4142e7ac45ee7014d8f8d5acddf178b7b"}, + {file = "multidict-6.7.1-cp314-cp314-win32.whl", hash = "sha256:3ab8b9d8b75aef9df299595d5388b14530839f6422333357af1339443cff777d"}, + {file = "multidict-6.7.1-cp314-cp314-win_amd64.whl", hash = "sha256:5e01429a929600e7dab7b166062d9bb54a5eed752384c7384c968c2afab8f50f"}, + {file = "multidict-6.7.1-cp314-cp314-win_arm64.whl", hash = "sha256:4885cb0e817aef5d00a2e8451d4665c1808378dc27c2705f1bf4ef8505c0d2e5"}, + {file = "multidict-6.7.1-cp314-cp314t-macosx_10_15_universal2.whl", hash = "sha256:0458c978acd8e6ea53c81eefaddbbee9c6c5e591f41b3f5e8e194780fe026581"}, + {file = "multidict-6.7.1-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:c0abd12629b0af3cf590982c0b413b1e7395cd4ec026f30986818ab95bfaa94a"}, + {file = "multidict-6.7.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:14525a5f61d7d0c94b368a42cff4c9a4e7ba2d52e2672a7b23d84dc86fb02b0c"}, + {file = "multidict-6.7.1-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:17307b22c217b4cf05033dabefe68255a534d637c6c9b0cc8382718f87be4262"}, + {file = 
"multidict-6.7.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7a7e590ff876a3eaf1c02a4dfe0724b6e69a9e9de6d8f556816f29c496046e59"}, + {file = "multidict-6.7.1-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:5fa6a95dfee63893d80a34758cd0e0c118a30b8dcb46372bf75106c591b77889"}, + {file = "multidict-6.7.1-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a0543217a6a017692aa6ae5cc39adb75e587af0f3a82288b1492eb73dd6cc2a4"}, + {file = "multidict-6.7.1-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f99fe611c312b3c1c0ace793f92464d8cd263cc3b26b5721950d977b006b6c4d"}, + {file = "multidict-6.7.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9004d8386d133b7e6135679424c91b0b854d2d164af6ea3f289f8f2761064609"}, + {file = "multidict-6.7.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:e628ef0e6859ffd8273c69412a2465c4be4a9517d07261b33334b5ec6f3c7489"}, + {file = "multidict-6.7.1-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:841189848ba629c3552035a6a7f5bf3b02eb304e9fea7492ca220a8eda6b0e5c"}, + {file = "multidict-6.7.1-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:ce1bbd7d780bb5a0da032e095c951f7014d6b0a205f8318308140f1a6aba159e"}, + {file = "multidict-6.7.1-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:b26684587228afed0d50cf804cc71062cc9c1cdf55051c4c6345d372947b268c"}, + {file = "multidict-6.7.1-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:9f9af11306994335398293f9958071019e3ab95e9a707dc1383a35613f6abcb9"}, + {file = "multidict-6.7.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:b4938326284c4f1224178a560987b6cf8b4d38458b113d9b8c1db1a836e640a2"}, + {file = "multidict-6.7.1-cp314-cp314t-win32.whl", hash = "sha256:98655c737850c064a65e006a3df7c997cd3b220be4ec8fe26215760b9697d4d7"}, + 
{file = "multidict-6.7.1-cp314-cp314t-win_amd64.whl", hash = "sha256:497bde6223c212ba11d462853cfa4f0ae6ef97465033e7dc9940cdb3ab5b48e5"}, + {file = "multidict-6.7.1-cp314-cp314t-win_arm64.whl", hash = "sha256:2bbd113e0d4af5db41d5ebfe9ccaff89de2120578164f86a5d17d5a576d1e5b2"}, + {file = "multidict-6.7.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:65573858d27cdeaca41893185677dc82395159aa28875a8867af66532d413a8f"}, + {file = "multidict-6.7.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c524c6fb8fc342793708ab111c4dbc90ff9abd568de220432500e47e990c0358"}, + {file = "multidict-6.7.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:aa23b001d968faef416ff70dc0f1ab045517b9b42a90edd3e9bcdb06479e31d5"}, + {file = "multidict-6.7.1-cp39-cp39-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:6704fa2b7453b2fb121740555fa1ee20cd98c4d011120caf4d2b8d4e7c76eec0"}, + {file = "multidict-6.7.1-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:121a34e5bfa410cdf2c8c49716de160de3b1dbcd86b49656f5681e4543bcd1a8"}, + {file = "multidict-6.7.1-cp39-cp39-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:026d264228bcd637d4e060844e39cdc60f86c479e463d49075dedc21b18fbbe0"}, + {file = "multidict-6.7.1-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:0e697826df7eb63418ee190fd06ce9f1803593bb4b9517d08c60d9b9a7f69d8f"}, + {file = "multidict-6.7.1-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:bb08271280173720e9fea9ede98e5231defcbad90f1624bea26f32ec8a956e2f"}, + {file = "multidict-6.7.1-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c6b3228e1d80af737b72925ce5fb4daf5a335e49cd7ab77ed7b9fdfbf58c526e"}, + {file = "multidict-6.7.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = 
"sha256:3943debf0fbb57bdde5901695c11094a9a36723e5c03875f87718ee15ca2f4d2"}, + {file = "multidict-6.7.1-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:98c5787b0a0d9a41d9311eae44c3b76e6753def8d8870ab501320efe75a6a5f8"}, + {file = "multidict-6.7.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:08ccb2a6dc72009093ebe7f3f073e5ec5964cba9a706fa94b1a1484039b87941"}, + {file = "multidict-6.7.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:eb351f72c26dc9abe338ca7294661aa22969ad8ffe7ef7d5541d19f368dc854a"}, + {file = "multidict-6.7.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:ac1c665bad8b5d762f5f85ebe4d94130c26965f11de70c708c75671297c776de"}, + {file = "multidict-6.7.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1fa6609d0364f4f6f58351b4659a1f3e0e898ba2a8c5cac04cb2c7bc556b0bc5"}, + {file = "multidict-6.7.1-cp39-cp39-win32.whl", hash = "sha256:6f77ce314a29263e67adadc7e7c1bc699fcb3a305059ab973d038f87caa42ed0"}, + {file = "multidict-6.7.1-cp39-cp39-win_amd64.whl", hash = "sha256:f537b55778cd3cbee430abe3131255d3a78202e0f9ea7ffc6ada893a4bcaeea4"}, + {file = "multidict-6.7.1-cp39-cp39-win_arm64.whl", hash = "sha256:749aa54f578f2e5f439538706a475aa844bfa8ef75854b1401e6e528e4937cf9"}, + {file = "multidict-6.7.1-py3-none-any.whl", hash = "sha256:55d97cc6dae627efa6a6e548885712d4864b81110ac76fa4e534c03819fa4a56"}, + {file = "multidict-6.7.1.tar.gz", hash = "sha256:ec6652a1bee61c53a3e5776b6049172c53b6aaba34f18c9ad04f82712bac623d"}, +] + +[package.dependencies] +typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.11\""} + +[[package]] +name = "numpy" +version = "2.2.6" +description = "Fundamental package for array computing in Python" +optional = false +python-versions = ">=3.10" +groups = ["main"] +files = [ + {file = "numpy-2.2.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b412caa66f72040e6d268491a59f2c43bf03eb6c96dd8f0307829feb7fa2b6fb"}, + {file = "numpy-2.2.6-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:8e41fd67c52b86603a91c1a505ebaef50b3314de0213461c7a6e99c9a3beff90"}, + {file = "numpy-2.2.6-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:37e990a01ae6ec7fe7fa1c26c55ecb672dd98b19c3d0e1d1f326fa13cb38d163"}, + {file = "numpy-2.2.6-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:5a6429d4be8ca66d889b7cf70f536a397dc45ba6faeb5f8c5427935d9592e9cf"}, + {file = "numpy-2.2.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:efd28d4e9cd7d7a8d39074a4d44c63eda73401580c5c76acda2ce969e0a38e83"}, + {file = "numpy-2.2.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc7b73d02efb0e18c000e9ad8b83480dfcd5dfd11065997ed4c6747470ae8915"}, + {file = "numpy-2.2.6-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:74d4531beb257d2c3f4b261bfb0fc09e0f9ebb8842d82a7b4209415896adc680"}, + {file = "numpy-2.2.6-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8fc377d995680230e83241d8a96def29f204b5782f371c532579b4f20607a289"}, + {file = "numpy-2.2.6-cp310-cp310-win32.whl", hash = "sha256:b093dd74e50a8cba3e873868d9e93a85b78e0daf2e98c6797566ad8044e8363d"}, + {file = "numpy-2.2.6-cp310-cp310-win_amd64.whl", hash = "sha256:f0fd6321b839904e15c46e0d257fdd101dd7f530fe03fd6359c1ea63738703f3"}, + {file = "numpy-2.2.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f9f1adb22318e121c5c69a09142811a201ef17ab257a1e66ca3025065b7f53ae"}, + {file = "numpy-2.2.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c820a93b0255bc360f53eca31a0e676fd1101f673dda8da93454a12e23fc5f7a"}, + {file = "numpy-2.2.6-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:3d70692235e759f260c3d837193090014aebdf026dfd167834bcba43e30c2a42"}, + {file = "numpy-2.2.6-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:481b49095335f8eed42e39e8041327c05b0f6f4780488f61286ed3c01368d491"}, + {file = "numpy-2.2.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b64d8d4d17135e00c8e346e0a738deb17e754230d7e0810ac5012750bbd85a5a"}, + {file 
= "numpy-2.2.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba10f8411898fc418a521833e014a77d3ca01c15b0c6cdcce6a0d2897e6dbbdf"}, + {file = "numpy-2.2.6-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:bd48227a919f1bafbdda0583705e547892342c26fb127219d60a5c36882609d1"}, + {file = "numpy-2.2.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9551a499bf125c1d4f9e250377c1ee2eddd02e01eac6644c080162c0c51778ab"}, + {file = "numpy-2.2.6-cp311-cp311-win32.whl", hash = "sha256:0678000bb9ac1475cd454c6b8c799206af8107e310843532b04d49649c717a47"}, + {file = "numpy-2.2.6-cp311-cp311-win_amd64.whl", hash = "sha256:e8213002e427c69c45a52bbd94163084025f533a55a59d6f9c5b820774ef3303"}, + {file = "numpy-2.2.6-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:41c5a21f4a04fa86436124d388f6ed60a9343a6f767fced1a8a71c3fbca038ff"}, + {file = "numpy-2.2.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:de749064336d37e340f640b05f24e9e3dd678c57318c7289d222a8a2f543e90c"}, + {file = "numpy-2.2.6-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:894b3a42502226a1cac872f840030665f33326fc3dac8e57c607905773cdcde3"}, + {file = "numpy-2.2.6-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:71594f7c51a18e728451bb50cc60a3ce4e6538822731b2933209a1f3614e9282"}, + {file = "numpy-2.2.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f2618db89be1b4e05f7a1a847a9c1c0abd63e63a1607d892dd54668dd92faf87"}, + {file = "numpy-2.2.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd83c01228a688733f1ded5201c678f0c53ecc1006ffbc404db9f7a899ac6249"}, + {file = "numpy-2.2.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:37c0ca431f82cd5fa716eca9506aefcabc247fb27ba69c5062a6d3ade8cf8f49"}, + {file = "numpy-2.2.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fe27749d33bb772c80dcd84ae7e8df2adc920ae8297400dabec45f0dedb3f6de"}, + {file = "numpy-2.2.6-cp312-cp312-win32.whl", hash = 
"sha256:4eeaae00d789f66c7a25ac5f34b71a7035bb474e679f410e5e1a94deb24cf2d4"}, + {file = "numpy-2.2.6-cp312-cp312-win_amd64.whl", hash = "sha256:c1f9540be57940698ed329904db803cf7a402f3fc200bfe599334c9bd84a40b2"}, + {file = "numpy-2.2.6-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:0811bb762109d9708cca4d0b13c4f67146e3c3b7cf8d34018c722adb2d957c84"}, + {file = "numpy-2.2.6-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:287cc3162b6f01463ccd86be154f284d0893d2b3ed7292439ea97eafa8170e0b"}, + {file = "numpy-2.2.6-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:f1372f041402e37e5e633e586f62aa53de2eac8d98cbfb822806ce4bbefcb74d"}, + {file = "numpy-2.2.6-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:55a4d33fa519660d69614a9fad433be87e5252f4b03850642f88993f7b2ca566"}, + {file = "numpy-2.2.6-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f92729c95468a2f4f15e9bb94c432a9229d0d50de67304399627a943201baa2f"}, + {file = "numpy-2.2.6-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1bc23a79bfabc5d056d106f9befb8d50c31ced2fbc70eedb8155aec74a45798f"}, + {file = "numpy-2.2.6-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:e3143e4451880bed956e706a3220b4e5cf6172ef05fcc397f6f36a550b1dd868"}, + {file = "numpy-2.2.6-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b4f13750ce79751586ae2eb824ba7e1e8dba64784086c98cdbbcc6a42112ce0d"}, + {file = "numpy-2.2.6-cp313-cp313-win32.whl", hash = "sha256:5beb72339d9d4fa36522fc63802f469b13cdbe4fdab4a288f0c441b74272ebfd"}, + {file = "numpy-2.2.6-cp313-cp313-win_amd64.whl", hash = "sha256:b0544343a702fa80c95ad5d3d608ea3599dd54d4632df855e4c8d24eb6ecfa1c"}, + {file = "numpy-2.2.6-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0bca768cd85ae743b2affdc762d617eddf3bcf8724435498a1e80132d04879e6"}, + {file = "numpy-2.2.6-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:fc0c5673685c508a142ca65209b4e79ed6740a4ed6b2267dbba90f34b0b3cfda"}, + {file = 
"numpy-2.2.6-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:5bd4fc3ac8926b3819797a7c0e2631eb889b4118a9898c84f585a54d475b7e40"}, + {file = "numpy-2.2.6-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:fee4236c876c4e8369388054d02d0e9bb84821feb1a64dd59e137e6511a551f8"}, + {file = "numpy-2.2.6-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e1dda9c7e08dc141e0247a5b8f49cf05984955246a327d4c48bda16821947b2f"}, + {file = "numpy-2.2.6-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f447e6acb680fd307f40d3da4852208af94afdfab89cf850986c3ca00562f4fa"}, + {file = "numpy-2.2.6-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:389d771b1623ec92636b0786bc4ae56abafad4a4c513d36a55dce14bd9ce8571"}, + {file = "numpy-2.2.6-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:8e9ace4a37db23421249ed236fdcdd457d671e25146786dfc96835cd951aa7c1"}, + {file = "numpy-2.2.6-cp313-cp313t-win32.whl", hash = "sha256:038613e9fb8c72b0a41f025a7e4c3f0b7a1b5d768ece4796b674c8f3fe13efff"}, + {file = "numpy-2.2.6-cp313-cp313t-win_amd64.whl", hash = "sha256:6031dd6dfecc0cf9f668681a37648373bddd6421fff6c66ec1624eed0180ee06"}, + {file = "numpy-2.2.6-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:0b605b275d7bd0c640cad4e5d30fa701a8d59302e127e5f79138ad62762c3e3d"}, + {file = "numpy-2.2.6-pp310-pypy310_pp73-macosx_14_0_x86_64.whl", hash = "sha256:7befc596a7dc9da8a337f79802ee8adb30a552a94f792b9c9d18c840055907db"}, + {file = "numpy-2.2.6-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce47521a4754c8f4593837384bd3424880629f718d87c5d44f8ed763edd63543"}, + {file = "numpy-2.2.6-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:d042d24c90c41b54fd506da306759e06e568864df8ec17ccc17e9e884634fd00"}, + {file = "numpy-2.2.6.tar.gz", hash = "sha256:e29554e2bef54a90aa5cc07da6ce955accb83f21ab5de01a62c8478897b264fd"}, +] + +[[package]] +name = "ollama" +version = "0.3.3" +description = "The official Python 
client for Ollama." +optional = false +python-versions = "<4.0,>=3.8" +groups = ["main"] +files = [ + {file = "ollama-0.3.3-py3-none-any.whl", hash = "sha256:ca6242ce78ab34758082b7392df3f9f6c2cb1d070a9dede1a4c545c929e16dba"}, + {file = "ollama-0.3.3.tar.gz", hash = "sha256:f90a6d61803117f40b0e8ff17465cab5e1eb24758a473cfe8101aff38bc13b51"}, +] + +[package.dependencies] +httpx = ">=0.27.0,<0.28.0" + +[[package]] +name = "openai" +version = "1.109.1" +description = "The official Python library for the openai API" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "openai-1.109.1-py3-none-any.whl", hash = "sha256:6bcaf57086cf59159b8e27447e4e7dd019db5d29a438072fbd49c290c7e65315"}, + {file = "openai-1.109.1.tar.gz", hash = "sha256:d173ed8dbca665892a6db099b4a2dfac624f94d20a93f46eb0b56aae940ed869"}, +] + +[package.dependencies] +anyio = ">=3.5.0,<5" +distro = ">=1.7.0,<2" +httpx = ">=0.23.0,<1" +jiter = ">=0.4.0,<1" +pydantic = ">=1.9.0,<3" +sniffio = "*" +tqdm = ">4" +typing-extensions = ">=4.11,<5" + +[package.extras] +aiohttp = ["aiohttp", "httpx-aiohttp (>=0.1.8)"] +datalib = ["numpy (>=1)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)"] +realtime = ["websockets (>=13,<16)"] +voice-helpers = ["numpy (>=2.0.2)", "sounddevice (>=0.5.1)"] + +[[package]] +name = "packaging" +version = "26.0" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "packaging-26.0-py3-none-any.whl", hash = "sha256:b36f1fef9334a5588b4166f8bcd26a14e521f2b55e6b9de3aaa80d3ff7a37529"}, + {file = "packaging-26.0.tar.gz", hash = "sha256:00243ae351a257117b6a241061796684b084ed1c516a08c48a3f7e147a9d80b4"}, +] + +[[package]] +name = "pillow" +version = "10.4.0" +description = "Python Imaging Library (Fork)" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "pillow-10.4.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = 
"sha256:4d9667937cfa347525b319ae34375c37b9ee6b525440f3ef48542fcf66f2731e"}, + {file = "pillow-10.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:543f3dc61c18dafb755773efc89aae60d06b6596a63914107f75459cf984164d"}, + {file = "pillow-10.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7928ecbf1ece13956b95d9cbcfc77137652b02763ba384d9ab508099a2eca856"}, + {file = "pillow-10.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4d49b85c4348ea0b31ea63bc75a9f3857869174e2bf17e7aba02945cd218e6f"}, + {file = "pillow-10.4.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:6c762a5b0997f5659a5ef2266abc1d8851ad7749ad9a6a5506eb23d314e4f46b"}, + {file = "pillow-10.4.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:a985e028fc183bf12a77a8bbf36318db4238a3ded7fa9df1b9a133f1cb79f8fc"}, + {file = "pillow-10.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:812f7342b0eee081eaec84d91423d1b4650bb9828eb53d8511bcef8ce5aecf1e"}, + {file = "pillow-10.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ac1452d2fbe4978c2eec89fb5a23b8387aba707ac72810d9490118817d9c0b46"}, + {file = "pillow-10.4.0-cp310-cp310-win32.whl", hash = "sha256:bcd5e41a859bf2e84fdc42f4edb7d9aba0a13d29a2abadccafad99de3feff984"}, + {file = "pillow-10.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:ecd85a8d3e79cd7158dec1c9e5808e821feea088e2f69a974db5edf84dc53141"}, + {file = "pillow-10.4.0-cp310-cp310-win_arm64.whl", hash = "sha256:ff337c552345e95702c5fde3158acb0625111017d0e5f24bf3acdb9cc16b90d1"}, + {file = "pillow-10.4.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:0a9ec697746f268507404647e531e92889890a087e03681a3606d9b920fbee3c"}, + {file = "pillow-10.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:dfe91cb65544a1321e631e696759491ae04a2ea11d36715eca01ce07284738be"}, + {file = "pillow-10.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:5dc6761a6efc781e6a1544206f22c80c3af4c8cf461206d46a1e6006e4429ff3"}, + {file = "pillow-10.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e84b6cc6a4a3d76c153a6b19270b3526a5a8ed6b09501d3af891daa2a9de7d6"}, + {file = "pillow-10.4.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:bbc527b519bd3aa9d7f429d152fea69f9ad37c95f0b02aebddff592688998abe"}, + {file = "pillow-10.4.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:76a911dfe51a36041f2e756b00f96ed84677cdeb75d25c767f296c1c1eda1319"}, + {file = "pillow-10.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:59291fb29317122398786c2d44427bbd1a6d7ff54017075b22be9d21aa59bd8d"}, + {file = "pillow-10.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:416d3a5d0e8cfe4f27f574362435bc9bae57f679a7158e0096ad2beb427b8696"}, + {file = "pillow-10.4.0-cp311-cp311-win32.whl", hash = "sha256:7086cc1d5eebb91ad24ded9f58bec6c688e9f0ed7eb3dbbf1e4800280a896496"}, + {file = "pillow-10.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:cbed61494057c0f83b83eb3a310f0bf774b09513307c434d4366ed64f4128a91"}, + {file = "pillow-10.4.0-cp311-cp311-win_arm64.whl", hash = "sha256:f5f0c3e969c8f12dd2bb7e0b15d5c468b51e5017e01e2e867335c81903046a22"}, + {file = "pillow-10.4.0-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:673655af3eadf4df6b5457033f086e90299fdd7a47983a13827acf7459c15d94"}, + {file = "pillow-10.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:866b6942a92f56300012f5fbac71f2d610312ee65e22f1aa2609e491284e5597"}, + {file = "pillow-10.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29dbdc4207642ea6aad70fbde1a9338753d33fb23ed6956e706936706f52dd80"}, + {file = "pillow-10.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf2342ac639c4cf38799a44950bbc2dfcb685f052b9e262f446482afaf4bffca"}, + {file = "pillow-10.4.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = 
"sha256:f5b92f4d70791b4a67157321c4e8225d60b119c5cc9aee8ecf153aace4aad4ef"}, + {file = "pillow-10.4.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:86dcb5a1eb778d8b25659d5e4341269e8590ad6b4e8b44d9f4b07f8d136c414a"}, + {file = "pillow-10.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:780c072c2e11c9b2c7ca37f9a2ee8ba66f44367ac3e5c7832afcfe5104fd6d1b"}, + {file = "pillow-10.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:37fb69d905be665f68f28a8bba3c6d3223c8efe1edf14cc4cfa06c241f8c81d9"}, + {file = "pillow-10.4.0-cp312-cp312-win32.whl", hash = "sha256:7dfecdbad5c301d7b5bde160150b4db4c659cee2b69589705b6f8a0c509d9f42"}, + {file = "pillow-10.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:1d846aea995ad352d4bdcc847535bd56e0fd88d36829d2c90be880ef1ee4668a"}, + {file = "pillow-10.4.0-cp312-cp312-win_arm64.whl", hash = "sha256:e553cad5179a66ba15bb18b353a19020e73a7921296a7979c4a2b7f6a5cd57f9"}, + {file = "pillow-10.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8bc1a764ed8c957a2e9cacf97c8b2b053b70307cf2996aafd70e91a082e70df3"}, + {file = "pillow-10.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:6209bb41dc692ddfee4942517c19ee81b86c864b626dbfca272ec0f7cff5d9fb"}, + {file = "pillow-10.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bee197b30783295d2eb680b311af15a20a8b24024a19c3a26431ff83eb8d1f70"}, + {file = "pillow-10.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ef61f5dd14c300786318482456481463b9d6b91ebe5ef12f405afbba77ed0be"}, + {file = "pillow-10.4.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:297e388da6e248c98bc4a02e018966af0c5f92dfacf5a5ca22fa01cb3179bca0"}, + {file = "pillow-10.4.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:e4db64794ccdf6cb83a59d73405f63adbe2a1887012e308828596100a0b2f6cc"}, + {file = "pillow-10.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bd2880a07482090a3bcb01f4265f1936a903d70bc740bfcb1fd4e8a2ffe5cf5a"}, + 
{file = "pillow-10.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4b35b21b819ac1dbd1233317adeecd63495f6babf21b7b2512d244ff6c6ce309"}, + {file = "pillow-10.4.0-cp313-cp313-win32.whl", hash = "sha256:551d3fd6e9dc15e4c1eb6fc4ba2b39c0c7933fa113b220057a34f4bb3268a060"}, + {file = "pillow-10.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:030abdbe43ee02e0de642aee345efa443740aa4d828bfe8e2eb11922ea6a21ea"}, + {file = "pillow-10.4.0-cp313-cp313-win_arm64.whl", hash = "sha256:5b001114dd152cfd6b23befeb28d7aee43553e2402c9f159807bf55f33af8a8d"}, + {file = "pillow-10.4.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:8d4d5063501b6dd4024b8ac2f04962d661222d120381272deea52e3fc52d3736"}, + {file = "pillow-10.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7c1ee6f42250df403c5f103cbd2768a28fe1a0ea1f0f03fe151c8741e1469c8b"}, + {file = "pillow-10.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b15e02e9bb4c21e39876698abf233c8c579127986f8207200bc8a8f6bb27acf2"}, + {file = "pillow-10.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a8d4bade9952ea9a77d0c3e49cbd8b2890a399422258a77f357b9cc9be8d680"}, + {file = "pillow-10.4.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:43efea75eb06b95d1631cb784aa40156177bf9dd5b4b03ff38979e048258bc6b"}, + {file = "pillow-10.4.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:950be4d8ba92aca4b2bb0741285a46bfae3ca699ef913ec8416c1b78eadd64cd"}, + {file = "pillow-10.4.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:d7480af14364494365e89d6fddc510a13e5a2c3584cb19ef65415ca57252fb84"}, + {file = "pillow-10.4.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:73664fe514b34c8f02452ffb73b7a92c6774e39a647087f83d67f010eb9a0cf0"}, + {file = "pillow-10.4.0-cp38-cp38-win32.whl", hash = "sha256:e88d5e6ad0d026fba7bdab8c3f225a69f063f116462c49892b0149e21b6c0a0e"}, + {file = "pillow-10.4.0-cp38-cp38-win_amd64.whl", hash = 
"sha256:5161eef006d335e46895297f642341111945e2c1c899eb406882a6c61a4357ab"}, + {file = "pillow-10.4.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:0ae24a547e8b711ccaaf99c9ae3cd975470e1a30caa80a6aaee9a2f19c05701d"}, + {file = "pillow-10.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:298478fe4f77a4408895605f3482b6cc6222c018b2ce565c2b6b9c354ac3229b"}, + {file = "pillow-10.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:134ace6dc392116566980ee7436477d844520a26a4b1bd4053f6f47d096997fd"}, + {file = "pillow-10.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:930044bb7679ab003b14023138b50181899da3f25de50e9dbee23b61b4de2126"}, + {file = "pillow-10.4.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:c76e5786951e72ed3686e122d14c5d7012f16c8303a674d18cdcd6d89557fc5b"}, + {file = "pillow-10.4.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:b2724fdb354a868ddf9a880cb84d102da914e99119211ef7ecbdc613b8c96b3c"}, + {file = "pillow-10.4.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:dbc6ae66518ab3c5847659e9988c3b60dc94ffb48ef9168656e0019a93dbf8a1"}, + {file = "pillow-10.4.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:06b2f7898047ae93fad74467ec3d28fe84f7831370e3c258afa533f81ef7f3df"}, + {file = "pillow-10.4.0-cp39-cp39-win32.whl", hash = "sha256:7970285ab628a3779aecc35823296a7869f889b8329c16ad5a71e4901a3dc4ef"}, + {file = "pillow-10.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:961a7293b2457b405967af9c77dcaa43cc1a8cd50d23c532e62d48ab6cdd56f5"}, + {file = "pillow-10.4.0-cp39-cp39-win_arm64.whl", hash = "sha256:32cda9e3d601a52baccb2856b8ea1fc213c90b340c542dcef77140dfa3278a9e"}, + {file = "pillow-10.4.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:5b4815f2e65b30f5fbae9dfffa8636d992d49705723fe86a3661806e069352d4"}, + {file = "pillow-10.4.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:8f0aef4ef59694b12cadee839e2ba6afeab89c0f39a3adc02ed51d109117b8da"}, + {file = 
"pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9f4727572e2918acaa9077c919cbbeb73bd2b3ebcfe033b72f858fc9fbef0026"}, + {file = "pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff25afb18123cea58a591ea0244b92eb1e61a1fd497bf6d6384f09bc3262ec3e"}, + {file = "pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:dc3e2db6ba09ffd7d02ae9141cfa0ae23393ee7687248d46a7507b75d610f4f5"}, + {file = "pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:02a2be69f9c9b8c1e97cf2713e789d4e398c751ecfd9967c18d0ce304efbf885"}, + {file = "pillow-10.4.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:0755ffd4a0c6f267cccbae2e9903d95477ca2f77c4fcf3a3a09570001856c8a5"}, + {file = "pillow-10.4.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:a02364621fe369e06200d4a16558e056fe2805d3468350df3aef21e00d26214b"}, + {file = "pillow-10.4.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:1b5dea9831a90e9d0721ec417a80d4cbd7022093ac38a568db2dd78363b00908"}, + {file = "pillow-10.4.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9b885f89040bb8c4a1573566bbb2f44f5c505ef6e74cec7ab9068c900047f04b"}, + {file = "pillow-10.4.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87dd88ded2e6d74d31e1e0a99a726a6765cda32d00ba72dc37f0651f306daaa8"}, + {file = "pillow-10.4.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:2db98790afc70118bd0255c2eeb465e9767ecf1f3c25f9a1abb8ffc8cfd1fe0a"}, + {file = "pillow-10.4.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f7baece4ce06bade126fb84b8af1c33439a76d8a6fd818970215e0560ca28c27"}, + {file = "pillow-10.4.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:cfdd747216947628af7b259d274771d84db2268ca062dd5faf373639d00113a3"}, + {file = "pillow-10.4.0.tar.gz", hash = 
"sha256:166c1cd4d24309b30d61f79f4a9114b7b2313d7450912277855ff5dfd7cd4a06"}, +] + +[package.extras] +docs = ["furo", "olefile", "sphinx (>=7.3)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinxext-opengraph"] +fpx = ["olefile"] +mic = ["olefile"] +tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout"] +typing = ["typing-extensions ; python_version < \"3.10\""] +xmp = ["defusedxml"] + +[[package]] +name = "playwright" +version = "1.58.0" +description = "A high-level API to automate web browsers" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "playwright-1.58.0-py3-none-macosx_10_13_x86_64.whl", hash = "sha256:96e3204aac292ee639edbfdef6298b4be2ea0a55a16b7068df91adac077cc606"}, + {file = "playwright-1.58.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:70c763694739d28df71ed578b9c8202bb83e8fe8fb9268c04dd13afe36301f71"}, + {file = "playwright-1.58.0-py3-none-macosx_11_0_universal2.whl", hash = "sha256:185e0132578733d02802dfddfbbc35f42be23a45ff49ccae5081f25952238117"}, + {file = "playwright-1.58.0-py3-none-manylinux1_x86_64.whl", hash = "sha256:c95568ba1eda83812598c1dc9be60b4406dffd60b149bc1536180ad108723d6b"}, + {file = "playwright-1.58.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f9999948f1ab541d98812de25e3a8c410776aa516d948807140aff797b4bffa"}, + {file = "playwright-1.58.0-py3-none-win32.whl", hash = "sha256:1e03be090e75a0fabbdaeab65ce17c308c425d879fa48bb1d7986f96bfad0b99"}, + {file = "playwright-1.58.0-py3-none-win_amd64.whl", hash = "sha256:a2bf639d0ce33b3ba38de777e08697b0d8f3dc07ab6802e4ac53fb65e3907af8"}, + {file = "playwright-1.58.0-py3-none-win_arm64.whl", hash = "sha256:32ffe5c303901a13a0ecab91d1c3f74baf73b84f4bedbb6b935f5bc11cc98e1b"}, +] + +[package.dependencies] +greenlet = ">=3.1.1,<4.0.0" +pyee = ">=13,<14" + +[[package]] +name = "prometheus-client" +version = "0.24.1" +description = "Python client 
for the Prometheus monitoring system." +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "prometheus_client-0.24.1-py3-none-any.whl", hash = "sha256:150db128af71a5c2482b36e588fc8a6b95e498750da4b17065947c16070f4055"}, + {file = "prometheus_client-0.24.1.tar.gz", hash = "sha256:7e0ced7fbbd40f7b84962d5d2ab6f17ef88a72504dcf7c0b40737b43b2a461f9"}, +] + +[package.extras] +aiohttp = ["aiohttp"] +django = ["django"] +twisted = ["twisted"] + +[[package]] +name = "propcache" +version = "0.4.1" +description = "Accelerated property cache" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "propcache-0.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7c2d1fa3201efaf55d730400d945b5b3ab6e672e100ba0f9a409d950ab25d7db"}, + {file = "propcache-0.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1eb2994229cc8ce7fe9b3db88f5465f5fd8651672840b2e426b88cdb1a30aac8"}, + {file = "propcache-0.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:66c1f011f45a3b33d7bcb22daed4b29c0c9e2224758b6be00686731e1b46f925"}, + {file = "propcache-0.4.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9a52009f2adffe195d0b605c25ec929d26b36ef986ba85244891dee3b294df21"}, + {file = "propcache-0.4.1-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:5d4e2366a9c7b837555cf02fb9be2e3167d333aff716332ef1b7c3a142ec40c5"}, + {file = "propcache-0.4.1-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:9d2b6caef873b4f09e26ea7e33d65f42b944837563a47a94719cc3544319a0db"}, + {file = "propcache-0.4.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2b16ec437a8c8a965ecf95739448dd938b5c7f56e67ea009f4300d8df05f32b7"}, + {file = "propcache-0.4.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = 
"sha256:296f4c8ed03ca7476813fe666c9ea97869a8d7aec972618671b33a38a5182ef4"}, + {file = "propcache-0.4.1-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:1f0978529a418ebd1f49dad413a2b68af33f85d5c5ca5c6ca2a3bed375a7ac60"}, + {file = "propcache-0.4.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:fd138803047fb4c062b1c1dd95462f5209456bfab55c734458f15d11da288f8f"}, + {file = "propcache-0.4.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:8c9b3cbe4584636d72ff556d9036e0c9317fa27b3ac1f0f558e7e84d1c9c5900"}, + {file = "propcache-0.4.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f93243fdc5657247533273ac4f86ae106cc6445a0efacb9a1bfe982fcfefd90c"}, + {file = "propcache-0.4.1-cp310-cp310-win32.whl", hash = "sha256:a0ee98db9c5f80785b266eb805016e36058ac72c51a064040f2bc43b61101cdb"}, + {file = "propcache-0.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:1cdb7988c4e5ac7f6d175a28a9aa0c94cb6f2ebe52756a3c0cda98d2809a9e37"}, + {file = "propcache-0.4.1-cp310-cp310-win_arm64.whl", hash = "sha256:d82ad62b19645419fe79dd63b3f9253e15b30e955c0170e5cebc350c1844e581"}, + {file = "propcache-0.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:60a8fda9644b7dfd5dece8c61d8a85e271cb958075bfc4e01083c148b61a7caf"}, + {file = "propcache-0.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c30b53e7e6bda1d547cabb47c825f3843a0a1a42b0496087bb58d8fedf9f41b5"}, + {file = "propcache-0.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6918ecbd897443087a3b7cd978d56546a812517dcaaca51b49526720571fa93e"}, + {file = "propcache-0.4.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3d902a36df4e5989763425a8ab9e98cd8ad5c52c823b34ee7ef307fd50582566"}, + {file = "propcache-0.4.1-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a9695397f85973bb40427dedddf70d8dc4a44b22f1650dd4af9eedf443d45165"}, + {file = 
"propcache-0.4.1-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2bb07ffd7eaad486576430c89f9b215f9e4be68c4866a96e97db9e97fead85dc"}, + {file = "propcache-0.4.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fd6f30fdcf9ae2a70abd34da54f18da086160e4d7d9251f81f3da0ff84fc5a48"}, + {file = "propcache-0.4.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:fc38cba02d1acba4e2869eef1a57a43dfbd3d49a59bf90dda7444ec2be6a5570"}, + {file = "propcache-0.4.1-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:67fad6162281e80e882fb3ec355398cf72864a54069d060321f6cd0ade95fe85"}, + {file = "propcache-0.4.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f10207adf04d08bec185bae14d9606a1444715bc99180f9331c9c02093e1959e"}, + {file = "propcache-0.4.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:e9b0d8d0845bbc4cfcdcbcdbf5086886bc8157aa963c31c777ceff7846c77757"}, + {file = "propcache-0.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:981333cb2f4c1896a12f4ab92a9cc8f09ea664e9b7dbdc4eff74627af3a11c0f"}, + {file = "propcache-0.4.1-cp311-cp311-win32.whl", hash = "sha256:f1d2f90aeec838a52f1c1a32fe9a619fefd5e411721a9117fbf82aea638fe8a1"}, + {file = "propcache-0.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:364426a62660f3f699949ac8c621aad6977be7126c5807ce48c0aeb8e7333ea6"}, + {file = "propcache-0.4.1-cp311-cp311-win_arm64.whl", hash = "sha256:e53f3a38d3510c11953f3e6a33f205c6d1b001129f972805ca9b42fc308bc239"}, + {file = "propcache-0.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e153e9cd40cc8945138822807139367f256f89c6810c2634a4f6902b52d3b4e2"}, + {file = "propcache-0.4.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:cd547953428f7abb73c5ad82cbb32109566204260d98e41e5dfdc682eb7f8403"}, + {file = "propcache-0.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f048da1b4f243fc44f205dfd320933a951b8d89e0afd4c7cacc762a8b9165207"}, + {file = 
"propcache-0.4.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ec17c65562a827bba85e3872ead335f95405ea1674860d96483a02f5c698fa72"}, + {file = "propcache-0.4.1-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:405aac25c6394ef275dee4c709be43745d36674b223ba4eb7144bf4d691b7367"}, + {file = "propcache-0.4.1-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0013cb6f8dde4b2a2f66903b8ba740bdfe378c943c4377a200551ceb27f379e4"}, + {file = "propcache-0.4.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:15932ab57837c3368b024473a525e25d316d8353016e7cc0e5ba9eb343fbb1cf"}, + {file = "propcache-0.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:031dce78b9dc099f4c29785d9cf5577a3faf9ebf74ecbd3c856a7b92768c3df3"}, + {file = "propcache-0.4.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:ab08df6c9a035bee56e31af99be621526bd237bea9f32def431c656b29e41778"}, + {file = "propcache-0.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:4d7af63f9f93fe593afbf104c21b3b15868efb2c21d07d8732c0c4287e66b6a6"}, + {file = "propcache-0.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:cfc27c945f422e8b5071b6e93169679e4eb5bf73bbcbf1ba3ae3a83d2f78ebd9"}, + {file = "propcache-0.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:35c3277624a080cc6ec6f847cbbbb5b49affa3598c4535a0a4682a697aaa5c75"}, + {file = "propcache-0.4.1-cp312-cp312-win32.whl", hash = "sha256:671538c2262dadb5ba6395e26c1731e1d52534bfe9ae56d0b5573ce539266aa8"}, + {file = "propcache-0.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:cb2d222e72399fcf5890d1d5cc1060857b9b236adff2792ff48ca2dfd46c81db"}, + {file = "propcache-0.4.1-cp312-cp312-win_arm64.whl", hash = "sha256:204483131fb222bdaaeeea9f9e6c6ed0cac32731f75dfc1d4a567fc1926477c1"}, + {file = "propcache-0.4.1-cp313-cp313-macosx_10_13_universal2.whl", hash = 
"sha256:43eedf29202c08550aac1d14e0ee619b0430aaef78f85864c1a892294fbc28cf"}, + {file = "propcache-0.4.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:d62cdfcfd89ccb8de04e0eda998535c406bf5e060ffd56be6c586cbcc05b3311"}, + {file = "propcache-0.4.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cae65ad55793da34db5f54e4029b89d3b9b9490d8abe1b4c7ab5d4b8ec7ebf74"}, + {file = "propcache-0.4.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:333ddb9031d2704a301ee3e506dc46b1fe5f294ec198ed6435ad5b6a085facfe"}, + {file = "propcache-0.4.1-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:fd0858c20f078a32cf55f7e81473d96dcf3b93fd2ccdb3d40fdf54b8573df3af"}, + {file = "propcache-0.4.1-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:678ae89ebc632c5c204c794f8dab2837c5f159aeb59e6ed0539500400577298c"}, + {file = "propcache-0.4.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d472aeb4fbf9865e0c6d622d7f4d54a4e101a89715d8904282bb5f9a2f476c3f"}, + {file = "propcache-0.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4d3df5fa7e36b3225954fba85589da77a0fe6a53e3976de39caf04a0db4c36f1"}, + {file = "propcache-0.4.1-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:ee17f18d2498f2673e432faaa71698032b0127ebf23ae5974eeaf806c279df24"}, + {file = "propcache-0.4.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:580e97762b950f993ae618e167e7be9256b8353c2dcd8b99ec100eb50f5286aa"}, + {file = "propcache-0.4.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:501d20b891688eb8e7aa903021f0b72d5a55db40ffaab27edefd1027caaafa61"}, + {file = "propcache-0.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9a0bd56e5b100aef69bd8562b74b46254e7c8812918d3baa700c8a8009b0af66"}, + {file = "propcache-0.4.1-cp313-cp313-win32.whl", hash = 
"sha256:bcc9aaa5d80322bc2fb24bb7accb4a30f81e90ab8d6ba187aec0744bc302ad81"}, + {file = "propcache-0.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:381914df18634f5494334d201e98245c0596067504b9372d8cf93f4bb23e025e"}, + {file = "propcache-0.4.1-cp313-cp313-win_arm64.whl", hash = "sha256:8873eb4460fd55333ea49b7d189749ecf6e55bf85080f11b1c4530ed3034cba1"}, + {file = "propcache-0.4.1-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:92d1935ee1f8d7442da9c0c4fa7ac20d07e94064184811b685f5c4fada64553b"}, + {file = "propcache-0.4.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:473c61b39e1460d386479b9b2f337da492042447c9b685f28be4f74d3529e566"}, + {file = "propcache-0.4.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:c0ef0aaafc66fbd87842a3fe3902fd889825646bc21149eafe47be6072725835"}, + {file = "propcache-0.4.1-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f95393b4d66bfae908c3ca8d169d5f79cd65636ae15b5e7a4f6e67af675adb0e"}, + {file = "propcache-0.4.1-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c07fda85708bc48578467e85099645167a955ba093be0a2dcba962195676e859"}, + {file = "propcache-0.4.1-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:af223b406d6d000830c6f65f1e6431783fc3f713ba3e6cc8c024d5ee96170a4b"}, + {file = "propcache-0.4.1-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a78372c932c90ee474559c5ddfffd718238e8673c340dc21fe45c5b8b54559a0"}, + {file = "propcache-0.4.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:564d9f0d4d9509e1a870c920a89b2fec951b44bf5ba7d537a9e7c1ccec2c18af"}, + {file = "propcache-0.4.1-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:17612831fda0138059cc5546f4d12a2aacfb9e47068c06af35c400ba58ba7393"}, + {file = "propcache-0.4.1-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = 
"sha256:41a89040cb10bd345b3c1a873b2bf36413d48da1def52f268a055f7398514874"}, + {file = "propcache-0.4.1-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:e35b88984e7fa64aacecea39236cee32dd9bd8c55f57ba8a75cf2399553f9bd7"}, + {file = "propcache-0.4.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6f8b465489f927b0df505cbe26ffbeed4d6d8a2bbc61ce90eb074ff129ef0ab1"}, + {file = "propcache-0.4.1-cp313-cp313t-win32.whl", hash = "sha256:2ad890caa1d928c7c2965b48f3a3815c853180831d0e5503d35cf00c472f4717"}, + {file = "propcache-0.4.1-cp313-cp313t-win_amd64.whl", hash = "sha256:f7ee0e597f495cf415bcbd3da3caa3bd7e816b74d0d52b8145954c5e6fd3ff37"}, + {file = "propcache-0.4.1-cp313-cp313t-win_arm64.whl", hash = "sha256:929d7cbe1f01bb7baffb33dc14eb5691c95831450a26354cd210a8155170c93a"}, + {file = "propcache-0.4.1-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:3f7124c9d820ba5548d431afb4632301acf965db49e666aa21c305cbe8c6de12"}, + {file = "propcache-0.4.1-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:c0d4b719b7da33599dfe3b22d3db1ef789210a0597bc650b7cee9c77c2be8c5c"}, + {file = "propcache-0.4.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:9f302f4783709a78240ebc311b793f123328716a60911d667e0c036bc5dcbded"}, + {file = "propcache-0.4.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c80ee5802e3fb9ea37938e7eecc307fb984837091d5fd262bb37238b1ae97641"}, + {file = "propcache-0.4.1-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ed5a841e8bb29a55fb8159ed526b26adc5bdd7e8bd7bf793ce647cb08656cdf4"}, + {file = "propcache-0.4.1-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:55c72fd6ea2da4c318e74ffdf93c4fe4e926051133657459131a95c846d16d44"}, + {file = "propcache-0.4.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:8326e144341460402713f91df60ade3c999d601e7eb5ff8f6f7862d54de0610d"}, + {file = "propcache-0.4.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:060b16ae65bc098da7f6d25bf359f1f31f688384858204fe5d652979e0015e5b"}, + {file = "propcache-0.4.1-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:89eb3fa9524f7bec9de6e83cf3faed9d79bffa560672c118a96a171a6f55831e"}, + {file = "propcache-0.4.1-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:dee69d7015dc235f526fe80a9c90d65eb0039103fe565776250881731f06349f"}, + {file = "propcache-0.4.1-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:5558992a00dfd54ccbc64a32726a3357ec93825a418a401f5cc67df0ac5d9e49"}, + {file = "propcache-0.4.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:c9b822a577f560fbd9554812526831712c1436d2c046cedee4c3796d3543b144"}, + {file = "propcache-0.4.1-cp314-cp314-win32.whl", hash = "sha256:ab4c29b49d560fe48b696cdcb127dd36e0bc2472548f3bf56cc5cb3da2b2984f"}, + {file = "propcache-0.4.1-cp314-cp314-win_amd64.whl", hash = "sha256:5a103c3eb905fcea0ab98be99c3a9a5ab2de60228aa5aceedc614c0281cf6153"}, + {file = "propcache-0.4.1-cp314-cp314-win_arm64.whl", hash = "sha256:74c1fb26515153e482e00177a1ad654721bf9207da8a494a0c05e797ad27b992"}, + {file = "propcache-0.4.1-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:824e908bce90fb2743bd6b59db36eb4f45cd350a39637c9f73b1c1ea66f5b75f"}, + {file = "propcache-0.4.1-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:c2b5e7db5328427c57c8e8831abda175421b709672f6cfc3d630c3b7e2146393"}, + {file = "propcache-0.4.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:6f6ff873ed40292cd4969ef5310179afd5db59fdf055897e282485043fc80ad0"}, + {file = "propcache-0.4.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:49a2dc67c154db2c1463013594c458881a069fcf98940e61a0569016a583020a"}, + {file = "propcache-0.4.1-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = 
"sha256:005f08e6a0529984491e37d8dbc3dd86f84bd78a8ceb5fa9a021f4c48d4984be"}, + {file = "propcache-0.4.1-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5c3310452e0d31390da9035c348633b43d7e7feb2e37be252be6da45abd1abcc"}, + {file = "propcache-0.4.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4c3c70630930447f9ef1caac7728c8ad1c56bc5015338b20fed0d08ea2480b3a"}, + {file = "propcache-0.4.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:8e57061305815dfc910a3634dcf584f08168a8836e6999983569f51a8544cd89"}, + {file = "propcache-0.4.1-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:521a463429ef54143092c11a77e04056dd00636f72e8c45b70aaa3140d639726"}, + {file = "propcache-0.4.1-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:120c964da3fdc75e3731aa392527136d4ad35868cc556fd09bb6d09172d9a367"}, + {file = "propcache-0.4.1-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:d8f353eb14ee3441ee844ade4277d560cdd68288838673273b978e3d6d2c8f36"}, + {file = "propcache-0.4.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:ab2943be7c652f09638800905ee1bab2c544e537edb57d527997a24c13dc1455"}, + {file = "propcache-0.4.1-cp314-cp314t-win32.whl", hash = "sha256:05674a162469f31358c30bcaa8883cb7829fa3110bf9c0991fe27d7896c42d85"}, + {file = "propcache-0.4.1-cp314-cp314t-win_amd64.whl", hash = "sha256:990f6b3e2a27d683cb7602ed6c86f15ee6b43b1194736f9baaeb93d0016633b1"}, + {file = "propcache-0.4.1-cp314-cp314t-win_arm64.whl", hash = "sha256:ecef2343af4cc68e05131e45024ba34f6095821988a9d0a02aa7c73fcc448aa9"}, + {file = "propcache-0.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:3d233076ccf9e450c8b3bc6720af226b898ef5d051a2d145f7d765e6e9f9bcff"}, + {file = "propcache-0.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:357f5bb5c377a82e105e44bd3d52ba22b616f7b9773714bff93573988ef0a5fb"}, + {file = "propcache-0.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:cbc3b6dfc728105b2a57c06791eb07a94229202ea75c59db644d7d496b698cac"}, + {file = "propcache-0.4.1-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:182b51b421f0501952d938dc0b0eb45246a5b5153c50d42b495ad5fb7517c888"}, + {file = "propcache-0.4.1-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:4b536b39c5199b96fc6245eb5fb796c497381d3942f169e44e8e392b29c9ebcc"}, + {file = "propcache-0.4.1-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:db65d2af507bbfbdcedb254a11149f894169d90488dd3e7190f7cdcb2d6cd57a"}, + {file = "propcache-0.4.1-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fd2dbc472da1f772a4dae4fa24be938a6c544671a912e30529984dd80400cd88"}, + {file = "propcache-0.4.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:daede9cd44e0f8bdd9e6cc9a607fc81feb80fae7a5fc6cecaff0e0bb32e42d00"}, + {file = "propcache-0.4.1-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:71b749281b816793678ae7f3d0d84bd36e694953822eaad408d682efc5ca18e0"}, + {file = "propcache-0.4.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:0002004213ee1f36cfb3f9a42b5066100c44276b9b72b4e1504cddd3d692e86e"}, + {file = "propcache-0.4.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:fe49d0a85038f36ba9e3ffafa1103e61170b28e95b16622e11be0a0ea07c6781"}, + {file = "propcache-0.4.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:99d43339c83aaf4d32bda60928231848eee470c6bda8d02599cc4cebe872d183"}, + {file = "propcache-0.4.1-cp39-cp39-win32.whl", hash = "sha256:a129e76735bc792794d5177069691c3217898b9f5cee2b2661471e52ffe13f19"}, + {file = "propcache-0.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:948dab269721ae9a87fd16c514a0a2c2a1bdb23a9a61b969b0f9d9ee2968546f"}, + {file = "propcache-0.4.1-cp39-cp39-win_arm64.whl", hash = "sha256:5fd37c406dd6dc85aa743e214cef35dc54bbdd1419baac4f6ae5e5b1a2976938"}, + {file = 
"propcache-0.4.1-py3-none-any.whl", hash = "sha256:af2a6052aeb6cf17d3e46ee169099044fd8224cbaf75c76a2ef596e8163e2237"}, + {file = "propcache-0.4.1.tar.gz", hash = "sha256:f48107a8c637e80362555f37ecf49abe20370e557cc4ab374f04ec4423c97c3d"}, +] + +[[package]] +name = "protobuf" +version = "6.33.5" +description = "" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "protobuf-6.33.5-cp310-abi3-win32.whl", hash = "sha256:d71b040839446bac0f4d162e758bea99c8251161dae9d0983a3b88dee345153b"}, + {file = "protobuf-6.33.5-cp310-abi3-win_amd64.whl", hash = "sha256:3093804752167bcab3998bec9f1048baae6e29505adaf1afd14a37bddede533c"}, + {file = "protobuf-6.33.5-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:a5cb85982d95d906df1e2210e58f8e4f1e3cdc088e52c921a041f9c9a0386de5"}, + {file = "protobuf-6.33.5-cp39-abi3-manylinux2014_aarch64.whl", hash = "sha256:9b71e0281f36f179d00cbcb119cb19dec4d14a81393e5ea220f64b286173e190"}, + {file = "protobuf-6.33.5-cp39-abi3-manylinux2014_s390x.whl", hash = "sha256:8afa18e1d6d20af15b417e728e9f60f3aa108ee76f23c3b2c07a2c3b546d3afd"}, + {file = "protobuf-6.33.5-cp39-abi3-manylinux2014_x86_64.whl", hash = "sha256:cbf16ba3350fb7b889fca858fb215967792dc125b35c7976ca4818bee3521cf0"}, + {file = "protobuf-6.33.5-cp39-cp39-win32.whl", hash = "sha256:a3157e62729aafb8df6da2c03aa5c0937c7266c626ce11a278b6eb7963c4e37c"}, + {file = "protobuf-6.33.5-cp39-cp39-win_amd64.whl", hash = "sha256:8f04fa32763dcdb4973d537d6b54e615cc61108c7cb38fe59310c3192d29510a"}, + {file = "protobuf-6.33.5-py3-none-any.whl", hash = "sha256:69915a973dd0f60f31a08b8318b73eab2bd6a392c79184b3612226b0a3f8ec02"}, + {file = "protobuf-6.33.5.tar.gz", hash = "sha256:6ddcac2a081f8b7b9642c09406bc6a4290128fce5f471cddd165960bb9119e5c"}, +] + +[[package]] +name = "pydantic" +version = "2.12.5" +description = "Data validation using Python type hints" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "pydantic-2.12.5-py3-none-any.whl", 
hash = "sha256:e561593fccf61e8a20fc46dfc2dfe075b8be7d0188df33f221ad1f0139180f9d"}, + {file = "pydantic-2.12.5.tar.gz", hash = "sha256:4d351024c75c0f085a9febbb665ce8c0c6ec5d30e903bdb6394b7ede26aebb49"}, +] + +[package.dependencies] +annotated-types = ">=0.6.0" +pydantic-core = "2.41.5" +typing-extensions = ">=4.14.1" +typing-inspection = ">=0.4.2" + +[package.extras] +email = ["email-validator (>=2.0.0)"] +timezone = ["tzdata ; python_version >= \"3.9\" and platform_system == \"Windows\""] + +[[package]] +name = "pydantic-core" +version = "2.41.5" +description = "Core functionality for Pydantic validation and serialization" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "pydantic_core-2.41.5-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:77b63866ca88d804225eaa4af3e664c5faf3568cea95360d21f4725ab6e07146"}, + {file = "pydantic_core-2.41.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:dfa8a0c812ac681395907e71e1274819dec685fec28273a28905df579ef137e2"}, + {file = "pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5921a4d3ca3aee735d9fd163808f5e8dd6c6972101e4adbda9a4667908849b97"}, + {file = "pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e25c479382d26a2a41b7ebea1043564a937db462816ea07afa8a44c0866d52f9"}, + {file = "pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f547144f2966e1e16ae626d8ce72b4cfa0caedc7fa28052001c94fb2fcaa1c52"}, + {file = "pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f52298fbd394f9ed112d56f3d11aabd0d5bd27beb3084cc3d8ad069483b8941"}, + {file = "pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:100baa204bb412b74fe285fb0f3a385256dad1d1879f0a5cb1499ed2e83d132a"}, + {file = "pydantic_core-2.41.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:05a2c8852530ad2812cb7914dc61a1125dc4e06252ee98e5638a12da6cc6fb6c"}, + {file = "pydantic_core-2.41.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:29452c56df2ed968d18d7e21f4ab0ac55e71dc59524872f6fc57dcf4a3249ed2"}, + {file = "pydantic_core-2.41.5-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:d5160812ea7a8a2ffbe233d8da666880cad0cbaf5d4de74ae15c313213d62556"}, + {file = "pydantic_core-2.41.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:df3959765b553b9440adfd3c795617c352154e497a4eaf3752555cfb5da8fc49"}, + {file = "pydantic_core-2.41.5-cp310-cp310-win32.whl", hash = "sha256:1f8d33a7f4d5a7889e60dc39856d76d09333d8a6ed0f5f1190635cbec70ec4ba"}, + {file = "pydantic_core-2.41.5-cp310-cp310-win_amd64.whl", hash = "sha256:62de39db01b8d593e45871af2af9e497295db8d73b085f6bfd0b18c83c70a8f9"}, + {file = "pydantic_core-2.41.5-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:a3a52f6156e73e7ccb0f8cced536adccb7042be67cb45f9562e12b319c119da6"}, + {file = "pydantic_core-2.41.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7f3bf998340c6d4b0c9a2f02d6a400e51f123b59565d74dc60d252ce888c260b"}, + {file = "pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:378bec5c66998815d224c9ca994f1e14c0c21cb95d2f52b6021cc0b2a58f2a5a"}, + {file = "pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e7b576130c69225432866fe2f4a469a85a54ade141d96fd396dffcf607b558f8"}, + {file = "pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6cb58b9c66f7e4179a2d5e0f849c48eff5c1fca560994d6eb6543abf955a149e"}, + {file = "pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:88942d3a3dff3afc8288c21e565e476fc278902ae4d6d134f1eeda118cc830b1"}, + {file = "pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:f31d95a179f8d64d90f6831d71fa93290893a33148d890ba15de25642c5d075b"}, + {file = "pydantic_core-2.41.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c1df3d34aced70add6f867a8cf413e299177e0c22660cc767218373d0779487b"}, + {file = "pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:4009935984bd36bd2c774e13f9a09563ce8de4abaa7226f5108262fa3e637284"}, + {file = "pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:34a64bc3441dc1213096a20fe27e8e128bd3ff89921706e83c0b1ac971276594"}, + {file = "pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c9e19dd6e28fdcaa5a1de679aec4141f691023916427ef9bae8584f9c2fb3b0e"}, + {file = "pydantic_core-2.41.5-cp311-cp311-win32.whl", hash = "sha256:2c010c6ded393148374c0f6f0bf89d206bf3217f201faa0635dcd56bd1520f6b"}, + {file = "pydantic_core-2.41.5-cp311-cp311-win_amd64.whl", hash = "sha256:76ee27c6e9c7f16f47db7a94157112a2f3a00e958bc626e2f4ee8bec5c328fbe"}, + {file = "pydantic_core-2.41.5-cp311-cp311-win_arm64.whl", hash = "sha256:4bc36bbc0b7584de96561184ad7f012478987882ebf9f9c389b23f432ea3d90f"}, + {file = "pydantic_core-2.41.5-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f41a7489d32336dbf2199c8c0a215390a751c5b014c2c1c5366e817202e9cdf7"}, + {file = "pydantic_core-2.41.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:070259a8818988b9a84a449a2a7337c7f430a22acc0859c6b110aa7212a6d9c0"}, + {file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e96cea19e34778f8d59fe40775a7a574d95816eb150850a85a7a4c8f4b94ac69"}, + {file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed2e99c456e3fadd05c991f8f437ef902e00eedf34320ba2b0842bd1c3ca3a75"}, + {file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65840751b72fbfd82c3c640cff9284545342a4f1eb1586ad0636955b261b0b05"}, + {file = 
"pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e536c98a7626a98feb2d3eaf75944ef6f3dbee447e1f841eae16f2f0a72d8ddc"}, + {file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eceb81a8d74f9267ef4081e246ffd6d129da5d87e37a77c9bde550cb04870c1c"}, + {file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d38548150c39b74aeeb0ce8ee1d8e82696f4a4e16ddc6de7b1d8823f7de4b9b5"}, + {file = "pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c23e27686783f60290e36827f9c626e63154b82b116d7fe9adba1fda36da706c"}, + {file = "pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:482c982f814460eabe1d3bb0adfdc583387bd4691ef00b90575ca0d2b6fe2294"}, + {file = "pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:bfea2a5f0b4d8d43adf9d7b8bf019fb46fdd10a2e5cde477fbcb9d1fa08c68e1"}, + {file = "pydantic_core-2.41.5-cp312-cp312-win32.whl", hash = "sha256:b74557b16e390ec12dca509bce9264c3bbd128f8a2c376eaa68003d7f327276d"}, + {file = "pydantic_core-2.41.5-cp312-cp312-win_amd64.whl", hash = "sha256:1962293292865bca8e54702b08a4f26da73adc83dd1fcf26fbc875b35d81c815"}, + {file = "pydantic_core-2.41.5-cp312-cp312-win_arm64.whl", hash = "sha256:1746d4a3d9a794cacae06a5eaaccb4b8643a131d45fbc9af23e353dc0a5ba5c3"}, + {file = "pydantic_core-2.41.5-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:941103c9be18ac8daf7b7adca8228f8ed6bb7a1849020f643b3a14d15b1924d9"}, + {file = "pydantic_core-2.41.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:112e305c3314f40c93998e567879e887a3160bb8689ef3d2c04b6cc62c33ac34"}, + {file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cbaad15cb0c90aa221d43c00e77bb33c93e8d36e0bf74760cd00e732d10a6a0"}, + {file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:03ca43e12fab6023fc79d28ca6b39b05f794ad08ec2feccc59a339b02f2b3d33"}, + {file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc799088c08fa04e43144b164feb0c13f9a0bc40503f8df3e9fde58a3c0c101e"}, + {file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:97aeba56665b4c3235a0e52b2c2f5ae9cd071b8a8310ad27bddb3f7fb30e9aa2"}, + {file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:406bf18d345822d6c21366031003612b9c77b3e29ffdb0f612367352aab7d586"}, + {file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b93590ae81f7010dbe380cdeab6f515902ebcbefe0b9327cc4804d74e93ae69d"}, + {file = "pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:01a3d0ab748ee531f4ea6c3e48ad9dac84ddba4b0d82291f87248f2f9de8d740"}, + {file = "pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:6561e94ba9dacc9c61bce40e2d6bdc3bfaa0259d3ff36ace3b1e6901936d2e3e"}, + {file = "pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:915c3d10f81bec3a74fbd4faebe8391013ba61e5a1a8d48c4455b923bdda7858"}, + {file = "pydantic_core-2.41.5-cp313-cp313-win32.whl", hash = "sha256:650ae77860b45cfa6e2cdafc42618ceafab3a2d9a3811fcfbd3bbf8ac3c40d36"}, + {file = "pydantic_core-2.41.5-cp313-cp313-win_amd64.whl", hash = "sha256:79ec52ec461e99e13791ec6508c722742ad745571f234ea6255bed38c6480f11"}, + {file = "pydantic_core-2.41.5-cp313-cp313-win_arm64.whl", hash = "sha256:3f84d5c1b4ab906093bdc1ff10484838aca54ef08de4afa9de0f5f14d69639cd"}, + {file = "pydantic_core-2.41.5-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:3f37a19d7ebcdd20b96485056ba9e8b304e27d9904d233d7b1015db320e51f0a"}, + {file = "pydantic_core-2.41.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1d1d9764366c73f996edd17abb6d9d7649a7eb690006ab6adbda117717099b14"}, + {file = 
"pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25e1c2af0fce638d5f1988b686f3b3ea8cd7de5f244ca147c777769e798a9cd1"}, + {file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:506d766a8727beef16b7adaeb8ee6217c64fc813646b424d0804d67c16eddb66"}, + {file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4819fa52133c9aa3c387b3328f25c1facc356491e6135b459f1de698ff64d869"}, + {file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2b761d210c9ea91feda40d25b4efe82a1707da2ef62901466a42492c028553a2"}, + {file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22f0fb8c1c583a3b6f24df2470833b40207e907b90c928cc8d3594b76f874375"}, + {file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2782c870e99878c634505236d81e5443092fba820f0373997ff75f90f68cd553"}, + {file = "pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:0177272f88ab8312479336e1d777f6b124537d47f2123f89cb37e0accea97f90"}, + {file = "pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:63510af5e38f8955b8ee5687740d6ebf7c2a0886d15a6d65c32814613681bc07"}, + {file = "pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:e56ba91f47764cc14f1daacd723e3e82d1a89d783f0f5afe9c364b8bb491ccdb"}, + {file = "pydantic_core-2.41.5-cp314-cp314-win32.whl", hash = "sha256:aec5cf2fd867b4ff45b9959f8b20ea3993fc93e63c7363fe6851424c8a7e7c23"}, + {file = "pydantic_core-2.41.5-cp314-cp314-win_amd64.whl", hash = "sha256:8e7c86f27c585ef37c35e56a96363ab8de4e549a95512445b85c96d3e2f7c1bf"}, + {file = "pydantic_core-2.41.5-cp314-cp314-win_arm64.whl", hash = "sha256:e672ba74fbc2dc8eea59fb6d4aed6845e6905fc2a8afe93175d94a83ba2a01a0"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-macosx_10_12_x86_64.whl", 
hash = "sha256:8566def80554c3faa0e65ac30ab0932b9e3a5cd7f8323764303d468e5c37595a"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:b80aa5095cd3109962a298ce14110ae16b8c1aece8b72f9dafe81cf597ad80b3"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3006c3dd9ba34b0c094c544c6006cc79e87d8612999f1a5d43b769b89181f23c"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:72f6c8b11857a856bcfa48c86f5368439f74453563f951e473514579d44aa612"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5cb1b2f9742240e4bb26b652a5aeb840aa4b417c7748b6f8387927bc6e45e40d"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bd3d54f38609ff308209bd43acea66061494157703364ae40c951f83ba99a1a9"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ff4321e56e879ee8d2a879501c8e469414d948f4aba74a2d4593184eb326660"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d0d2568a8c11bf8225044aa94409e21da0cb09dcdafe9ecd10250b2baad531a9"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:a39455728aabd58ceabb03c90e12f71fd30fa69615760a075b9fec596456ccc3"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_armv7l.whl", hash = "sha256:239edca560d05757817c13dc17c50766136d21f7cd0fac50295499ae24f90fdf"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:2a5e06546e19f24c6a96a129142a75cee553cc018ffee48a460059b1185f4470"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-win32.whl", hash = "sha256:b4ececa40ac28afa90871c2cc2b9ffd2ff0bf749380fbdf57d165fd23da353aa"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-win_amd64.whl", hash = 
"sha256:80aa89cad80b32a912a65332f64a4450ed00966111b6615ca6816153d3585a8c"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-win_arm64.whl", hash = "sha256:35b44f37a3199f771c3eaa53051bc8a70cd7b54f333531c59e29fd4db5d15008"}, + {file = "pydantic_core-2.41.5-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:8bfeaf8735be79f225f3fefab7f941c712aaca36f1128c9d7e2352ee1aa87bdf"}, + {file = "pydantic_core-2.41.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:346285d28e4c8017da95144c7f3acd42740d637ff41946af5ce6e5e420502dd5"}, + {file = "pydantic_core-2.41.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a75dafbf87d6276ddc5b2bf6fae5254e3d0876b626eb24969a574fff9149ee5d"}, + {file = "pydantic_core-2.41.5-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7b93a4d08587e2b7e7882de461e82b6ed76d9026ce91ca7915e740ecc7855f60"}, + {file = "pydantic_core-2.41.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e8465ab91a4bd96d36dde3263f06caa6a8a6019e4113f24dc753d79a8b3a3f82"}, + {file = "pydantic_core-2.41.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:299e0a22e7ae2b85c1a57f104538b2656e8ab1873511fd718a1c1c6f149b77b5"}, + {file = "pydantic_core-2.41.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:707625ef0983fcfb461acfaf14de2067c5942c6bb0f3b4c99158bed6fedd3cf3"}, + {file = "pydantic_core-2.41.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f41eb9797986d6ebac5e8edff36d5cef9de40def462311b3eb3eeded1431e425"}, + {file = "pydantic_core-2.41.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0384e2e1021894b1ff5a786dbf94771e2986ebe2869533874d7e43bc79c6f504"}, + {file = "pydantic_core-2.41.5-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:f0cd744688278965817fd0839c4a4116add48d23890d468bc436f78beb28abf5"}, + {file = "pydantic_core-2.41.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:753e230374206729bf0a807954bcc6c150d3743928a73faffee51ac6557a03c3"}, + {file = "pydantic_core-2.41.5-cp39-cp39-win32.whl", hash = "sha256:873e0d5b4fb9b89ef7c2d2a963ea7d02879d9da0da8d9d4933dee8ee86a8b460"}, + {file = "pydantic_core-2.41.5-cp39-cp39-win_amd64.whl", hash = "sha256:e4f4a984405e91527a0d62649ee21138f8e3d0ef103be488c1dc11a80d7f184b"}, + {file = "pydantic_core-2.41.5-graalpy311-graalpy242_311_native-macosx_10_12_x86_64.whl", hash = "sha256:b96d5f26b05d03cc60f11a7761a5ded1741da411e7fe0909e27a5e6a0cb7b034"}, + {file = "pydantic_core-2.41.5-graalpy311-graalpy242_311_native-macosx_11_0_arm64.whl", hash = "sha256:634e8609e89ceecea15e2d61bc9ac3718caaaa71963717bf3c8f38bfde64242c"}, + {file = "pydantic_core-2.41.5-graalpy311-graalpy242_311_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:93e8740d7503eb008aa2df04d3b9735f845d43ae845e6dcd2be0b55a2da43cd2"}, + {file = "pydantic_core-2.41.5-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f15489ba13d61f670dcc96772e733aad1a6f9c429cc27574c6cdaed82d0146ad"}, + {file = "pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:7da7087d756b19037bc2c06edc6c170eeef3c3bafcb8f532ff17d64dc427adfd"}, + {file = "pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:aabf5777b5c8ca26f7824cb4a120a740c9588ed58df9b2d196ce92fba42ff8dc"}, + {file = "pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c007fe8a43d43b3969e8469004e9845944f1a80e6acd47c150856bb87f230c56"}, + {file = "pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76d0819de158cd855d1cbb8fcafdf6f5cf1eb8e470abe056d5d161106e38062b"}, + {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b5819cd790dbf0c5eb9f82c73c16b39a65dd6dd4d1439dcdea7816ec9adddab8"}, + 
{file = "pydantic_core-2.41.5-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:5a4e67afbc95fa5c34cf27d9089bca7fcab4e51e57278d710320a70b956d1b9a"}, + {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ece5c59f0ce7d001e017643d8d24da587ea1f74f6993467d85ae8a5ef9d4f42b"}, + {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:16f80f7abe3351f8ea6858914ddc8c77e02578544a0ebc15b4c2e1a0e813b0b2"}, + {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:33cb885e759a705b426baada1fe68cbb0a2e68e34c5d0d0289a364cf01709093"}, + {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:c8d8b4eb992936023be7dee581270af5c6e0697a8559895f527f5b7105ecd36a"}, + {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:242a206cd0318f95cd21bdacff3fcc3aab23e79bba5cac3db5a841c9ef9c6963"}, + {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:d3a978c4f57a597908b7e697229d996d77a6d3c94901e9edee593adada95ce1a"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b2379fa7ed44ddecb5bfe4e48577d752db9fc10be00a6b7446e9663ba143de26"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:266fb4cbf5e3cbd0b53669a6d1b039c45e3ce651fd5442eff4d07c2cc8d66808"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58133647260ea01e4d0500089a8c4f07bd7aa6ce109682b1426394988d8aaacc"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:287dad91cfb551c363dc62899a80e9e14da1f0e2b6ebde82c806612ca2a13ef1"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:03b77d184b9eb40240ae9fd676ca364ce1085f203e1b1256f8ab9984dca80a84"}, + {file = 
"pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:a668ce24de96165bb239160b3d854943128f4334822900534f2fe947930e5770"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f14f8f046c14563f8eb3f45f499cc658ab8d10072961e07225e507adb700e93f"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:56121965f7a4dc965bff783d70b907ddf3d57f6eba29b6d2e5dabfaf07799c51"}, + {file = "pydantic_core-2.41.5.tar.gz", hash = "sha256:08daa51ea16ad373ffd5e7606252cc32f07bc72b28284b6bc9c6df804816476e"}, +] + +[package.dependencies] +typing-extensions = ">=4.14.1" + +[[package]] +name = "pydantic-settings" +version = "2.13.1" +description = "Settings management using Pydantic" +optional = false +python-versions = ">=3.10" +groups = ["main"] +files = [ + {file = "pydantic_settings-2.13.1-py3-none-any.whl", hash = "sha256:d56fd801823dbeae7f0975e1f8c8e25c258eb75d278ea7abb5d9cebb01b56237"}, + {file = "pydantic_settings-2.13.1.tar.gz", hash = "sha256:b4c11847b15237fb0171e1462bf540e294affb9b86db4d9aa5c01730bdbe4025"}, +] + +[package.dependencies] +pydantic = ">=2.7.0" +python-dotenv = ">=0.21.0" +typing-inspection = ">=0.4.0" + +[package.extras] +aws-secrets-manager = ["boto3 (>=1.35.0)", "boto3-stubs[secretsmanager]"] +azure-key-vault = ["azure-identity (>=1.16.0)", "azure-keyvault-secrets (>=4.8.0)"] +gcp-secret-manager = ["google-cloud-secret-manager (>=2.23.1)"] +toml = ["tomli (>=2.0.1)"] +yaml = ["pyyaml (>=6.0.1)"] + +[[package]] +name = "pyee" +version = "13.0.1" +description = "A rough port of Node.js's EventEmitter to Python with a few tricks of its own" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "pyee-13.0.1-py3-none-any.whl", hash = "sha256:af2f8fede4171ef667dfded53f96e2ed0d6e6bd7ee3bb46437f77e3b57689228"}, + {file = "pyee-13.0.1.tar.gz", hash = "sha256:0b931f7c14535667ed4c7e0d531716368715e860b988770fc7eb8578d1f67fc8"}, +] + 
+[package.dependencies] +typing-extensions = "*" + +[package.extras] +dev = ["black", "build", "flake8", "flake8-black", "isort", "jupyter-console", "mkdocs", "mkdocs-include-markdown-plugin", "mkdocstrings[python]", "mypy", "pytest", "pytest-asyncio ; python_version >= \"3.4\"", "pytest-trio ; python_version >= \"3.7\"", "sphinx", "toml", "tox", "trio", "trio ; python_version > \"3.6\"", "trio-typing ; python_version > \"3.6\"", "twine", "twisted", "validate-pyproject[all]"] + +[[package]] +name = "pygments" +version = "2.19.2" +description = "Pygments is a syntax highlighting package written in Python." +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b"}, + {file = "pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887"}, +] + +[package.extras] +windows-terminal = ["colorama (>=0.4.6)"] + +[[package]] +name = "pytesseract" +version = "0.3.13" +description = "Python-tesseract is a python wrapper for Google's Tesseract-OCR" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "pytesseract-0.3.13-py3-none-any.whl", hash = "sha256:7a99c6c2ac598360693d83a416e36e0b33a67638bb9d77fdcac094a3589d4b34"}, + {file = "pytesseract-0.3.13.tar.gz", hash = "sha256:4bf5f880c99406f52a3cfc2633e42d9dc67615e69d8a509d74867d3baddb5db9"}, +] + +[package.dependencies] +packaging = ">=21.3" +Pillow = ">=8.0.0" + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +groups = ["main"] +files = [ + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = 
"sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "python-dotenv" +version = "1.2.2" +description = "Read key-value pairs from a .env file and set them as environment variables" +optional = false +python-versions = ">=3.10" +groups = ["main"] +files = [ + {file = "python_dotenv-1.2.2-py3-none-any.whl", hash = "sha256:1d8214789a24de455a8b8bd8ae6fe3c6b69a5e3d64aa8a8e5d68e694bbcb285a"}, + {file = "python_dotenv-1.2.2.tar.gz", hash = "sha256:2c371a91fbd7ba082c2c1dc1f8bf89ca22564a087c2c287cd9b662adde799cf3"}, +] + +[package.extras] +cli = ["click (>=5.0)"] + +[[package]] +name = "pyyaml" +version = "6.0.3" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "PyYAML-6.0.3-cp38-cp38-macosx_10_13_x86_64.whl", hash = "sha256:c2514fceb77bc5e7a2f7adfaa1feb2fb311607c9cb518dbc378688ec73d8292f"}, + {file = "PyYAML-6.0.3-cp38-cp38-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9c57bb8c96f6d1808c030b1687b9b5fb476abaa47f0db9c0101f5e9f394e97f4"}, + {file = "PyYAML-6.0.3-cp38-cp38-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:efd7b85f94a6f21e4932043973a7ba2613b059c4a000551892ac9f1d11f5baf3"}, + {file = "PyYAML-6.0.3-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:22ba7cfcad58ef3ecddc7ed1db3409af68d023b7f940da23c6c2a1890976eda6"}, + {file = "PyYAML-6.0.3-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:6344df0d5755a2c9a276d4473ae6b90647e216ab4757f8426893b5dd2ac3f369"}, + {file = "PyYAML-6.0.3-cp38-cp38-win32.whl", hash = "sha256:3ff07ec89bae51176c0549bc4c63aa6202991da2d9a6129d7aef7f1407d3f295"}, + {file = "PyYAML-6.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:5cf4e27da7e3fbed4d6c3d8e797387aaad68102272f8f9752883bc32d61cb87b"}, + {file = 
"pyyaml-6.0.3-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:214ed4befebe12df36bcc8bc2b64b396ca31be9304b8f59e25c11cf94a4c033b"}, + {file = "pyyaml-6.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:02ea2dfa234451bbb8772601d7b8e426c2bfa197136796224e50e35a78777956"}, + {file = "pyyaml-6.0.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b30236e45cf30d2b8e7b3e85881719e98507abed1011bf463a8fa23e9c3e98a8"}, + {file = "pyyaml-6.0.3-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:66291b10affd76d76f54fad28e22e51719ef9ba22b29e1d7d03d6777a9174198"}, + {file = "pyyaml-6.0.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9c7708761fccb9397fe64bbc0395abcae8c4bf7b0eac081e12b809bf47700d0b"}, + {file = "pyyaml-6.0.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:418cf3f2111bc80e0933b2cd8cd04f286338bb88bdc7bc8e6dd775ebde60b5e0"}, + {file = "pyyaml-6.0.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:5e0b74767e5f8c593e8c9b5912019159ed0533c70051e9cce3e8b6aa699fcd69"}, + {file = "pyyaml-6.0.3-cp310-cp310-win32.whl", hash = "sha256:28c8d926f98f432f88adc23edf2e6d4921ac26fb084b028c733d01868d19007e"}, + {file = "pyyaml-6.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:bdb2c67c6c1390b63c6ff89f210c8fd09d9a1217a465701eac7316313c915e4c"}, + {file = "pyyaml-6.0.3-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:44edc647873928551a01e7a563d7452ccdebee747728c1080d881d68af7b997e"}, + {file = "pyyaml-6.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:652cb6edd41e718550aad172851962662ff2681490a8a711af6a4d288dd96824"}, + {file = "pyyaml-6.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:10892704fc220243f5305762e276552a0395f7beb4dbf9b14ec8fd43b57f126c"}, + {file = "pyyaml-6.0.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:850774a7879607d3a6f50d36d04f00ee69e7fc816450e5f7e58d7f17f1ae5c00"}, + {file = "pyyaml-6.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b8bb0864c5a28024fac8a632c443c87c5aa6f215c0b126c449ae1a150412f31d"}, + {file = "pyyaml-6.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1d37d57ad971609cf3c53ba6a7e365e40660e3be0e5175fa9f2365a379d6095a"}, + {file = "pyyaml-6.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:37503bfbfc9d2c40b344d06b2199cf0e96e97957ab1c1b546fd4f87e53e5d3e4"}, + {file = "pyyaml-6.0.3-cp311-cp311-win32.whl", hash = "sha256:8098f252adfa6c80ab48096053f512f2321f0b998f98150cea9bd23d83e1467b"}, + {file = "pyyaml-6.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:9f3bfb4965eb874431221a3ff3fdcddc7e74e3b07799e0e84ca4a0f867d449bf"}, + {file = "pyyaml-6.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7f047e29dcae44602496db43be01ad42fc6f1cc0d8cd6c83d342306c32270196"}, + {file = "pyyaml-6.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fc09d0aa354569bc501d4e787133afc08552722d3ab34836a80547331bb5d4a0"}, + {file = "pyyaml-6.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9149cad251584d5fb4981be1ecde53a1ca46c891a79788c0df828d2f166bda28"}, + {file = "pyyaml-6.0.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5fdec68f91a0c6739b380c83b951e2c72ac0197ace422360e6d5a959d8d97b2c"}, + {file = "pyyaml-6.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ba1cc08a7ccde2d2ec775841541641e4548226580ab850948cbfda66a1befcdc"}, + {file = "pyyaml-6.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8dc52c23056b9ddd46818a57b78404882310fb473d63f17b07d5c40421e47f8e"}, + {file = "pyyaml-6.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:41715c910c881bc081f1e8872880d3c650acf13dfa8214bad49ed4cede7c34ea"}, + {file = 
"pyyaml-6.0.3-cp312-cp312-win32.whl", hash = "sha256:96b533f0e99f6579b3d4d4995707cf36df9100d67e0c8303a0c55b27b5f99bc5"}, + {file = "pyyaml-6.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:5fcd34e47f6e0b794d17de1b4ff496c00986e1c83f7ab2fb8fcfe9616ff7477b"}, + {file = "pyyaml-6.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:64386e5e707d03a7e172c0701abfb7e10f0fb753ee1d773128192742712a98fd"}, + {file = "pyyaml-6.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8da9669d359f02c0b91ccc01cac4a67f16afec0dac22c2ad09f46bee0697eba8"}, + {file = "pyyaml-6.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2283a07e2c21a2aa78d9c4442724ec1eb15f5e42a723b99cb3d822d48f5f7ad1"}, + {file = "pyyaml-6.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee2922902c45ae8ccada2c5b501ab86c36525b883eff4255313a253a3160861c"}, + {file = "pyyaml-6.0.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a33284e20b78bd4a18c8c2282d549d10bc8408a2a7ff57653c0cf0b9be0afce5"}, + {file = "pyyaml-6.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0f29edc409a6392443abf94b9cf89ce99889a1dd5376d94316ae5145dfedd5d6"}, + {file = "pyyaml-6.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f7057c9a337546edc7973c0d3ba84ddcdf0daa14533c2065749c9075001090e6"}, + {file = "pyyaml-6.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:eda16858a3cab07b80edaf74336ece1f986ba330fdb8ee0d6c0d68fe82bc96be"}, + {file = "pyyaml-6.0.3-cp313-cp313-win32.whl", hash = "sha256:d0eae10f8159e8fdad514efdc92d74fd8d682c933a6dd088030f3834bc8e6b26"}, + {file = "pyyaml-6.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:79005a0d97d5ddabfeeea4cf676af11e647e41d81c9a7722a193022accdb6b7c"}, + {file = "pyyaml-6.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:5498cd1645aa724a7c71c8f378eb29ebe23da2fc0d7a08071d89469bf1d2defb"}, + {file = 
"pyyaml-6.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:8d1fab6bb153a416f9aeb4b8763bc0f22a5586065f86f7664fc23339fc1c1fac"}, + {file = "pyyaml-6.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:34d5fcd24b8445fadc33f9cf348c1047101756fd760b4dacb5c3e99755703310"}, + {file = "pyyaml-6.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:501a031947e3a9025ed4405a168e6ef5ae3126c59f90ce0cd6f2bfc477be31b7"}, + {file = "pyyaml-6.0.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b3bc83488de33889877a0f2543ade9f70c67d66d9ebb4ac959502e12de895788"}, + {file = "pyyaml-6.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c458b6d084f9b935061bc36216e8a69a7e293a2f1e68bf956dcd9e6cbcd143f5"}, + {file = "pyyaml-6.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7c6610def4f163542a622a73fb39f534f8c101d690126992300bf3207eab9764"}, + {file = "pyyaml-6.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:5190d403f121660ce8d1d2c1bb2ef1bd05b5f68533fc5c2ea899bd15f4399b35"}, + {file = "pyyaml-6.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:4a2e8cebe2ff6ab7d1050ecd59c25d4c8bd7e6f400f5f82b96557ac0abafd0ac"}, + {file = "pyyaml-6.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:93dda82c9c22deb0a405ea4dc5f2d0cda384168e466364dec6255b293923b2f3"}, + {file = "pyyaml-6.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:02893d100e99e03eda1c8fd5c441d8c60103fd175728e23e431db1b589cf5ab3"}, + {file = "pyyaml-6.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c1ff362665ae507275af2853520967820d9124984e0f7466736aea23d8611fba"}, + {file = "pyyaml-6.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6adc77889b628398debc7b65c073bcb99c4a0237b248cacaf3fe8a557563ef6c"}, + {file = "pyyaml-6.0.3-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", 
hash = "sha256:a80cb027f6b349846a3bf6d73b5e95e782175e52f22108cfa17876aaeff93702"}, + {file = "pyyaml-6.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:00c4bdeba853cc34e7dd471f16b4114f4162dc03e6b7afcc2128711f0eca823c"}, + {file = "pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:66e1674c3ef6f541c35191caae2d429b967b99e02040f5ba928632d9a7f0f065"}, + {file = "pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:16249ee61e95f858e83976573de0f5b2893b3677ba71c9dd36b9cf8be9ac6d65"}, + {file = "pyyaml-6.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4ad1906908f2f5ae4e5a8ddfce73c320c2a1429ec52eafd27138b7f1cbe341c9"}, + {file = "pyyaml-6.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:ebc55a14a21cb14062aa4162f906cd962b28e2e9ea38f9b4391244cd8de4ae0b"}, + {file = "pyyaml-6.0.3-cp39-cp39-macosx_10_13_x86_64.whl", hash = "sha256:b865addae83924361678b652338317d1bd7e79b1f4596f96b96c77a5a34b34da"}, + {file = "pyyaml-6.0.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c3355370a2c156cffb25e876646f149d5d68f5e0a3ce86a5084dd0b64a994917"}, + {file = "pyyaml-6.0.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3c5677e12444c15717b902a5798264fa7909e41153cdf9ef7ad571b704a63dd9"}, + {file = "pyyaml-6.0.3-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5ed875a24292240029e4483f9d4a4b8a1ae08843b9c54f43fcc11e404532a8a5"}, + {file = "pyyaml-6.0.3-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0150219816b6a1fa26fb4699fb7daa9caf09eb1999f3b70fb6e786805e80375a"}, + {file = "pyyaml-6.0.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:fa160448684b4e94d80416c0fa4aac48967a969efe22931448d853ada8baf926"}, + {file = "pyyaml-6.0.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:27c0abcb4a5dac13684a37f76e701e054692a9b2d3064b70f5e4eb54810553d7"}, + {file = 
"pyyaml-6.0.3-cp39-cp39-win32.whl", hash = "sha256:1ebe39cb5fc479422b83de611d14e2c0d3bb2a18bbcb01f229ab3cfbd8fee7a0"}, + {file = "pyyaml-6.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:2e71d11abed7344e42a8849600193d15b6def118602c4c176f748e4583246007"}, + {file = "pyyaml-6.0.3.tar.gz", hash = "sha256:d76623373421df22fb4cf8817020cbb7ef15c725b9d5e45f17e189bfc384190f"}, +] + +[[package]] +name = "reductoai" +version = "0.16.0" +description = "The official Python library for the reducto API" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "reductoai-0.16.0-py3-none-any.whl", hash = "sha256:2f12974d90c3925d9e90700d7c63fdeee7dc20179974e2047ed2a3e8e75148c4"}, + {file = "reductoai-0.16.0.tar.gz", hash = "sha256:6daa1b0f6825ec437a8e8a43be6b238c3c60ba0c68c6d46e5eaed9800836480c"}, +] + +[package.dependencies] +anyio = ">=3.5.0,<5" +distro = ">=1.7.0,<2" +httpx = ">=0.23.0,<1" +numpy = ">=1.20.0,<3" +pydantic = ">=1.9.0,<3" +sniffio = "*" +typing-extensions = ">=4.10,<5" + +[package.extras] +aiohttp = ["aiohttp", "httpx-aiohttp (>=0.1.9)"] + +[[package]] +name = "requests" +version = "2.32.5" +description = "Python HTTP for Humans." 
+optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6"}, + {file = "requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset_normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "rich" +version = "14.3.3" +description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" +optional = false +python-versions = ">=3.8.0" +groups = ["main"] +files = [ + {file = "rich-14.3.3-py3-none-any.whl", hash = "sha256:793431c1f8619afa7d3b52b2cdec859562b950ea0d4b6b505397612db8d5362d"}, + {file = "rich-14.3.3.tar.gz", hash = "sha256:b8daa0b9e4eef54dd8cf7c86c03713f53241884e814f4e2f5fb342fe520f639b"}, +] + +[package.dependencies] +markdown-it-py = ">=2.2.0" +pygments = ">=2.13.0,<3.0.0" + +[package.extras] +jupyter = ["ipywidgets (>=7.5.1,<9)"] + +[[package]] +name = "ruff" +version = "0.15.4" +description = "An extremely fast Python linter and code formatter, written in Rust." 
+optional = false +python-versions = ">=3.7" +groups = ["dev"] +files = [ + {file = "ruff-0.15.4-py3-none-linux_armv6l.whl", hash = "sha256:a1810931c41606c686bae8b5b9a8072adac2f611bb433c0ba476acba17a332e0"}, + {file = "ruff-0.15.4-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:5a1632c66672b8b4d3e1d1782859e98d6e0b4e70829530666644286600a33992"}, + {file = "ruff-0.15.4-py3-none-macosx_11_0_arm64.whl", hash = "sha256:a4386ba2cd6c0f4ff75252845906acc7c7c8e1ac567b7bc3d373686ac8c222ba"}, + {file = "ruff-0.15.4-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b2496488bdfd3732747558b6f95ae427ff066d1fcd054daf75f5a50674411e75"}, + {file = "ruff-0.15.4-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3f1c4893841ff2d54cbda1b2860fa3260173df5ddd7b95d370186f8a5e66a4ac"}, + {file = "ruff-0.15.4-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:820b8766bd65503b6c30aaa6331e8ef3a6e564f7999c844e9a547c40179e440a"}, + {file = "ruff-0.15.4-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9fb74bab47139c1751f900f857fa503987253c3ef89129b24ed375e72873e85"}, + {file = "ruff-0.15.4-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f80c98765949c518142b3a50a5db89343aa90f2c2bf7799de9986498ae6176db"}, + {file = "ruff-0.15.4-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:451a2e224151729b3b6c9ffb36aed9091b2996fe4bdbd11f47e27d8f2e8888ec"}, + {file = "ruff-0.15.4-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:a8f157f2e583c513c4f5f896163a93198297371f34c04220daf40d133fdd4f7f"}, + {file = "ruff-0.15.4-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:917cc68503357021f541e69b35361c99387cdbbf99bd0ea4aa6f28ca99ff5338"}, + {file = "ruff-0.15.4-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:e9737c8161da79fd7cfec19f1e35620375bd8b2a50c3e77fa3d2c16f574105cc"}, + {file = "ruff-0.15.4-py3-none-musllinux_1_2_i686.whl", hash = 
"sha256:291258c917539e18f6ba40482fe31d6f5ac023994ee11d7bdafd716f2aab8a68"}, + {file = "ruff-0.15.4-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:3f83c45911da6f2cd5936c436cf86b9f09f09165f033a99dcf7477e34041cbc3"}, + {file = "ruff-0.15.4-py3-none-win32.whl", hash = "sha256:65594a2d557d4ee9f02834fcdf0a28daa8b3b9f6cb2cb93846025a36db47ef22"}, + {file = "ruff-0.15.4-py3-none-win_amd64.whl", hash = "sha256:04196ad44f0df220c2ece5b0e959c2f37c777375ec744397d21d15b50a75264f"}, + {file = "ruff-0.15.4-py3-none-win_arm64.whl", hash = "sha256:60d5177e8cfc70e51b9c5fad936c634872a74209f934c1e79107d11787ad5453"}, + {file = "ruff-0.15.4.tar.gz", hash = "sha256:3412195319e42d634470cc97aa9803d07e9d5c9223b99bcb1518f0c725f26ae1"}, +] + +[[package]] +name = "setuptools" +version = "82.0.0" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "setuptools-82.0.0-py3-none-any.whl", hash = "sha256:70b18734b607bd1da571d097d236cfcfacaf01de45717d59e6e04b96877532e0"}, + {file = "setuptools-82.0.0.tar.gz", hash = "sha256:22e0a2d69474c6ae4feb01951cb69d515ed23728cf96d05513d36e42b62b37cb"}, +] + +[package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\"", "ruff (>=0.13.0) ; sys_platform != \"cygwin\""] +core = ["importlib_metadata (>=6) ; python_version < \"3.10\"", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more_itertools", "more_itertools (>=8.8)", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1) ; python_version < \"3.11\"", "wheel (>=0.43.0)"] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] +enabler = 
["pytest-enabler (>=2.2)"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21) ; python_version >= \"3.9\" and sys_platform != \"cygwin\"", "jaraco.envs (>=2.2)", "jaraco.path (>=3.7.2)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf ; sys_platform != \"cygwin\"", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] +type = ["importlib_metadata (>=7.0.2) ; python_version < \"3.10\"", "jaraco.develop (>=7.21) ; sys_platform != \"cygwin\"", "mypy (==1.18.*)", "pytest-mypy"] + +[[package]] +name = "shellingham" +version = "1.5.4" +description = "Tool to Detect Surrounding Shell" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686"}, + {file = "shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de"}, +] + +[[package]] +name = "six" +version = "1.17.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +groups = ["main"] +files = [ + {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, + {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, +] + +[[package]] +name = "sniffio" +version = "1.3.1" +description = "Sniff out which async library your code is running under" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, + {file = "sniffio-1.3.1.tar.gz", hash = 
"sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, +] + +[[package]] +name = "tenacity" +version = "9.1.4" +description = "Retry code until it succeeds" +optional = false +python-versions = ">=3.10" +groups = ["main"] +files = [ + {file = "tenacity-9.1.4-py3-none-any.whl", hash = "sha256:6095a360c919085f28c6527de529e76a06ad89b23659fa881ae0649b867a9d55"}, + {file = "tenacity-9.1.4.tar.gz", hash = "sha256:adb31d4c263f2bd041081ab33b498309a57c77f9acf2db65aadf0898179cf93a"}, +] + +[package.extras] +doc = ["reno", "sphinx"] +test = ["pytest", "tornado (>=4.5)", "typeguard"] + +[[package]] +name = "tokenizers" +version = "0.22.2" +description = "" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "tokenizers-0.22.2-cp39-abi3-macosx_10_12_x86_64.whl", hash = "sha256:544dd704ae7238755d790de45ba8da072e9af3eea688f698b137915ae959281c"}, + {file = "tokenizers-0.22.2-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:1e418a55456beedca4621dbab65a318981467a2b188e982a23e117f115ce5001"}, + {file = "tokenizers-0.22.2-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2249487018adec45d6e3554c71d46eb39fa8ea67156c640f7513eb26f318cec7"}, + {file = "tokenizers-0.22.2-cp39-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:25b85325d0815e86e0bac263506dd114578953b7b53d7de09a6485e4a160a7dd"}, + {file = "tokenizers-0.22.2-cp39-abi3-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bfb88f22a209ff7b40a576d5324bf8286b519d7358663db21d6246fb17eea2d5"}, + {file = "tokenizers-0.22.2-cp39-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1c774b1276f71e1ef716e5486f21e76333464f47bece56bbd554485982a9e03e"}, + {file = "tokenizers-0.22.2-cp39-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:df6c4265b289083bf710dff49bc51ef252f9d5be33a45ee2bed151114a56207b"}, + {file = "tokenizers-0.22.2-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:369cc9fc8cc10cb24143873a0d95438bb8ee257bb80c71989e3ee290e8d72c67"}, + {file = "tokenizers-0.22.2-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:29c30b83d8dcd061078b05ae0cb94d3c710555fbb44861139f9f83dcca3dc3e4"}, + {file = "tokenizers-0.22.2-cp39-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:37ae80a28c1d3265bb1f22464c856bd23c02a05bb211e56d0c5301a435be6c1a"}, + {file = "tokenizers-0.22.2-cp39-abi3-musllinux_1_2_i686.whl", hash = "sha256:791135ee325f2336f498590eb2f11dc5c295232f288e75c99a36c5dbce63088a"}, + {file = "tokenizers-0.22.2-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:38337540fbbddff8e999d59970f3c6f35a82de10053206a7562f1ea02d046fa5"}, + {file = "tokenizers-0.22.2-cp39-abi3-win32.whl", hash = "sha256:a6bf3f88c554a2b653af81f3204491c818ae2ac6fbc09e76ef4773351292bc92"}, + {file = "tokenizers-0.22.2-cp39-abi3-win_amd64.whl", hash = "sha256:c9ea31edff2968b44a88f97d784c2f16dc0729b8b143ed004699ebca91f05c48"}, + {file = "tokenizers-0.22.2-cp39-abi3-win_arm64.whl", hash = "sha256:9ce725d22864a1e965217204946f830c37876eee3b2ba6fc6255e8e903d5fcbc"}, + {file = "tokenizers-0.22.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:753d47ebd4542742ef9261d9da92cd545b2cacbb48349a1225466745bb866ec4"}, + {file = "tokenizers-0.22.2-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e10bf9113d209be7cd046d40fbabbaf3278ff6d18eb4da4c500443185dc1896c"}, + {file = "tokenizers-0.22.2-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:64d94e84f6660764e64e7e0b22baa72f6cd942279fdbb21d46abd70d179f0195"}, + {file = "tokenizers-0.22.2-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f01a9c019878532f98927d2bacb79bbb404b43d3437455522a00a30718cdedb5"}, + {file = "tokenizers-0.22.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:319f659ee992222f04e58f84cbf407cfa66a65fe3a8de44e8ad2bc53e7d99012"}, + {file = 
"tokenizers-0.22.2-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1e50f8554d504f617d9e9d6e4c2c2884a12b388a97c5c77f0bc6cf4cd032feee"}, + {file = "tokenizers-0.22.2-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1a62ba2c5faa2dd175aaeed7b15abf18d20266189fb3406c5d0550dd34dd5f37"}, + {file = "tokenizers-0.22.2-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:143b999bdc46d10febb15cbffb4207ddd1f410e2c755857b5a0797961bbdc113"}, + {file = "tokenizers-0.22.2.tar.gz", hash = "sha256:473b83b915e547aa366d1eee11806deaf419e17be16310ac0a14077f1e28f917"}, +] + +[package.dependencies] +huggingface-hub = ">=0.16.4,<2.0" + +[package.extras] +dev = ["tokenizers[testing]"] +docs = ["setuptools-rust", "sphinx", "sphinx-rtd-theme"] +testing = ["datasets", "numpy", "pytest", "pytest-asyncio", "requests", "ruff", "ty"] + +[[package]] +name = "tqdm" +version = "4.67.3" +description = "Fast, Extensible Progress Meter" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "tqdm-4.67.3-py3-none-any.whl", hash = "sha256:ee1e4c0e59148062281c49d80b25b67771a127c85fc9676d3be5f243206826bf"}, + {file = "tqdm-4.67.3.tar.gz", hash = "sha256:7d825f03f89244ef73f1d4ce193cb1774a8179fd96f31d7e1dcde62092b960bb"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[package.extras] +dev = ["nbval", "pytest (>=6)", "pytest-asyncio (>=0.24)", "pytest-cov", "pytest-timeout"] +discord = ["requests"] +notebook = ["ipywidgets (>=6)"] +slack = ["slack-sdk"] +telegram = ["requests"] + +[[package]] +name = "typer" +version = "0.24.1" +description = "Typer, build great CLIs. Easy to code. Based on Python type hints." 
+optional = false +python-versions = ">=3.10" +groups = ["main"] +files = [ + {file = "typer-0.24.1-py3-none-any.whl", hash = "sha256:112c1f0ce578bfb4cab9ffdabc68f031416ebcc216536611ba21f04e9aa84c9e"}, + {file = "typer-0.24.1.tar.gz", hash = "sha256:e39b4732d65fbdcde189ae76cf7cd48aeae72919dea1fdfc16593be016256b45"}, +] + +[package.dependencies] +annotated-doc = ">=0.0.2" +click = ">=8.2.1" +rich = ">=12.3.0" +shellingham = ">=1.3.0" + +[[package]] +name = "types-requests" +version = "2.32.4.20260107" +description = "Typing stubs for requests" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "types_requests-2.32.4.20260107-py3-none-any.whl", hash = "sha256:b703fe72f8ce5b31ef031264fe9395cac8f46a04661a79f7ed31a80fb308730d"}, + {file = "types_requests-2.32.4.20260107.tar.gz", hash = "sha256:018a11ac158f801bfa84857ddec1650750e393df8a004a8a9ae2a9bec6fcb24f"}, +] + +[package.dependencies] +urllib3 = ">=2" + +[[package]] +name = "typing-extensions" +version = "4.15.0" +description = "Backported and Experimental Type Hints for Python 3.9+" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548"}, + {file = "typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466"}, +] + +[[package]] +name = "typing-inspection" +version = "0.4.2" +description = "Runtime typing introspection tools" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "typing_inspection-0.4.2-py3-none-any.whl", hash = "sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7"}, + {file = "typing_inspection-0.4.2.tar.gz", hash = "sha256:ba561c48a67c5958007083d386c3295464928b01faa735ab8547c5692e87f464"}, +] + +[package.dependencies] +typing-extensions = ">=4.12.0" + +[[package]] +name = "urllib3" +version = "2.6.3" 
+description = "HTTP library with thread-safe connection pooling, file post, and more." +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "urllib3-2.6.3-py3-none-any.whl", hash = "sha256:bf272323e553dfb2e87d9bfd225ca7b0f467b919d7bbd355436d3fd37cb0acd4"}, + {file = "urllib3-2.6.3.tar.gz", hash = "sha256:1b62b6884944a57dbe321509ab94fd4d3b307075e0c2eae991ac71ee15ad38ed"}, +] + +[package.extras] +brotli = ["brotli (>=1.2.0) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=1.2.0.0) ; platform_python_implementation != \"CPython\""] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["backports-zstd (>=1.0.0) ; python_version < \"3.14\""] + +[[package]] +name = "yarl" +version = "1.22.0" +description = "Yet another URL library" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "yarl-1.22.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:c7bd6683587567e5a49ee6e336e0612bec8329be1b7d4c8af5687dcdeb67ee1e"}, + {file = "yarl-1.22.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5cdac20da754f3a723cceea5b3448e1a2074866406adeb4ef35b469d089adb8f"}, + {file = "yarl-1.22.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:07a524d84df0c10f41e3ee918846e1974aba4ec017f990dc735aad487a0bdfdf"}, + {file = "yarl-1.22.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e1b329cb8146d7b736677a2440e422eadd775d1806a81db2d4cded80a48efc1a"}, + {file = "yarl-1.22.0-cp310-cp310-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:75976c6945d85dbb9ee6308cd7ff7b1fb9409380c82d6119bd778d8fcfe2931c"}, + {file = "yarl-1.22.0-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:80ddf7a5f8c86cb3eb4bc9028b07bbbf1f08a96c5c0bc1244be5e8fefcb94147"}, + {file = "yarl-1.22.0-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:d332fc2e3c94dad927f2112395772a4e4fedbcf8f80efc21ed7cdfae4d574fdb"}, + {file = "yarl-1.22.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0cf71bf877efeac18b38d3930594c0948c82b64547c1cf420ba48722fe5509f6"}, + {file = "yarl-1.22.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:663e1cadaddae26be034a6ab6072449a8426ddb03d500f43daf952b74553bba0"}, + {file = "yarl-1.22.0-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:6dcbb0829c671f305be48a7227918cfcd11276c2d637a8033a99a02b67bf9eda"}, + {file = "yarl-1.22.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:f0d97c18dfd9a9af4490631905a3f131a8e4c9e80a39353919e2cfed8f00aedc"}, + {file = "yarl-1.22.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:437840083abe022c978470b942ff832c3940b2ad3734d424b7eaffcd07f76737"}, + {file = "yarl-1.22.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:a899cbd98dce6f5d8de1aad31cb712ec0a530abc0a86bd6edaa47c1090138467"}, + {file = "yarl-1.22.0-cp310-cp310-win32.whl", hash = "sha256:595697f68bd1f0c1c159fcb97b661fc9c3f5db46498043555d04805430e79bea"}, + {file = "yarl-1.22.0-cp310-cp310-win_amd64.whl", hash = "sha256:cb95a9b1adaa48e41815a55ae740cfda005758104049a640a398120bf02515ca"}, + {file = "yarl-1.22.0-cp310-cp310-win_arm64.whl", hash = "sha256:b85b982afde6df99ecc996990d4ad7ccbdbb70e2a4ba4de0aecde5922ba98a0b"}, + {file = "yarl-1.22.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:1ab72135b1f2db3fed3997d7e7dc1b80573c67138023852b6efb336a5eae6511"}, + {file = "yarl-1.22.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:669930400e375570189492dc8d8341301578e8493aec04aebc20d4717f899dd6"}, + {file = "yarl-1.22.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:792a2af6d58177ef7c19cbf0097aba92ca1b9cb3ffdd9c7470e156c8f9b5e028"}, + {file = "yarl-1.22.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:3ea66b1c11c9150f1372f69afb6b8116f2dd7286f38e14ea71a44eee9ec51b9d"}, + {file = "yarl-1.22.0-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3e2daa88dc91870215961e96a039ec73e4937da13cf77ce17f9cad0c18df3503"}, + {file = "yarl-1.22.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ba440ae430c00eee41509353628600212112cd5018d5def7e9b05ea7ac34eb65"}, + {file = "yarl-1.22.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:e6438cc8f23a9c1478633d216b16104a586b9761db62bfacb6425bac0a36679e"}, + {file = "yarl-1.22.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4c52a6e78aef5cf47a98ef8e934755abf53953379b7d53e68b15ff4420e6683d"}, + {file = "yarl-1.22.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:3b06bcadaac49c70f4c88af4ffcfbe3dc155aab3163e75777818092478bcbbe7"}, + {file = "yarl-1.22.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:6944b2dc72c4d7f7052683487e3677456050ff77fcf5e6204e98caf785ad1967"}, + {file = "yarl-1.22.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:d5372ca1df0f91a86b047d1277c2aaf1edb32d78bbcefffc81b40ffd18f027ed"}, + {file = "yarl-1.22.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:51af598701f5299012b8416486b40fceef8c26fc87dc6d7d1f6fc30609ea0aa6"}, + {file = "yarl-1.22.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b266bd01fedeffeeac01a79ae181719ff848a5a13ce10075adbefc8f1daee70e"}, + {file = "yarl-1.22.0-cp311-cp311-win32.whl", hash = "sha256:a9b1ba5610a4e20f655258d5a1fdc7ebe3d837bb0e45b581398b99eb98b1f5ca"}, + {file = "yarl-1.22.0-cp311-cp311-win_amd64.whl", hash = "sha256:078278b9b0b11568937d9509b589ee83ef98ed6d561dfe2020e24a9fd08eaa2b"}, + {file = "yarl-1.22.0-cp311-cp311-win_arm64.whl", hash = "sha256:b6a6f620cfe13ccec221fa312139135166e47ae169f8253f72a0abc0dae94376"}, + {file = 
"yarl-1.22.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e340382d1afa5d32b892b3ff062436d592ec3d692aeea3bef3a5cfe11bbf8c6f"}, + {file = "yarl-1.22.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f1e09112a2c31ffe8d80be1b0988fa6a18c5d5cad92a9ffbb1c04c91bfe52ad2"}, + {file = "yarl-1.22.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:939fe60db294c786f6b7c2d2e121576628468f65453d86b0fe36cb52f987bd74"}, + {file = "yarl-1.22.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e1651bf8e0398574646744c1885a41198eba53dc8a9312b954073f845c90a8df"}, + {file = "yarl-1.22.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:b8a0588521a26bf92a57a1705b77b8b59044cdceccac7151bd8d229e66b8dedb"}, + {file = "yarl-1.22.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:42188e6a615c1a75bcaa6e150c3fe8f3e8680471a6b10150c5f7e83f47cc34d2"}, + {file = "yarl-1.22.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f6d2cb59377d99718913ad9a151030d6f83ef420a2b8f521d94609ecc106ee82"}, + {file = "yarl-1.22.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:50678a3b71c751d58d7908edc96d332af328839eea883bb554a43f539101277a"}, + {file = "yarl-1.22.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1e8fbaa7cec507aa24ea27a01456e8dd4b6fab829059b69844bd348f2d467124"}, + {file = "yarl-1.22.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:433885ab5431bc3d3d4f2f9bd15bfa1614c522b0f1405d62c4f926ccd69d04fa"}, + {file = "yarl-1.22.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:b790b39c7e9a4192dc2e201a282109ed2985a1ddbd5ac08dc56d0e121400a8f7"}, + {file = "yarl-1.22.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:31f0b53913220599446872d757257be5898019c85e7971599065bc55065dc99d"}, + {file = 
"yarl-1.22.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a49370e8f711daec68d09b821a34e1167792ee2d24d405cbc2387be4f158b520"}, + {file = "yarl-1.22.0-cp312-cp312-win32.whl", hash = "sha256:70dfd4f241c04bd9239d53b17f11e6ab672b9f1420364af63e8531198e3f5fe8"}, + {file = "yarl-1.22.0-cp312-cp312-win_amd64.whl", hash = "sha256:8884d8b332a5e9b88e23f60bb166890009429391864c685e17bd73a9eda9105c"}, + {file = "yarl-1.22.0-cp312-cp312-win_arm64.whl", hash = "sha256:ea70f61a47f3cc93bdf8b2f368ed359ef02a01ca6393916bc8ff877427181e74"}, + {file = "yarl-1.22.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8dee9c25c74997f6a750cd317b8ca63545169c098faee42c84aa5e506c819b53"}, + {file = "yarl-1.22.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:01e73b85a5434f89fc4fe27dcda2aff08ddf35e4d47bbbea3bdcd25321af538a"}, + {file = "yarl-1.22.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:22965c2af250d20c873cdbee8ff958fb809940aeb2e74ba5f20aaf6b7ac8c70c"}, + {file = "yarl-1.22.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b4f15793aa49793ec8d1c708ab7f9eded1aa72edc5174cae703651555ed1b601"}, + {file = "yarl-1.22.0-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e5542339dcf2747135c5c85f68680353d5cb9ffd741c0f2e8d832d054d41f35a"}, + {file = "yarl-1.22.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:5c401e05ad47a75869c3ab3e35137f8468b846770587e70d71e11de797d113df"}, + {file = "yarl-1.22.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:243dda95d901c733f5b59214d28b0120893d91777cb8aa043e6ef059d3cddfe2"}, + {file = "yarl-1.22.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bec03d0d388060058f5d291a813f21c011041938a441c593374da6077fe21b1b"}, + {file = "yarl-1.22.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = 
"sha256:b0748275abb8c1e1e09301ee3cf90c8a99678a4e92e4373705f2a2570d581273"}, + {file = "yarl-1.22.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:47fdb18187e2a4e18fda2c25c05d8251a9e4a521edaed757fef033e7d8498d9a"}, + {file = "yarl-1.22.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c7044802eec4524fde550afc28edda0dd5784c4c45f0be151a2d3ba017daca7d"}, + {file = "yarl-1.22.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:139718f35149ff544caba20fce6e8a2f71f1e39b92c700d8438a0b1d2a631a02"}, + {file = "yarl-1.22.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e1b51bebd221006d3d2f95fbe124b22b247136647ae5dcc8c7acafba66e5ee67"}, + {file = "yarl-1.22.0-cp313-cp313-win32.whl", hash = "sha256:d3e32536234a95f513bd374e93d717cf6b2231a791758de6c509e3653f234c95"}, + {file = "yarl-1.22.0-cp313-cp313-win_amd64.whl", hash = "sha256:47743b82b76d89a1d20b83e60d5c20314cbd5ba2befc9cda8f28300c4a08ed4d"}, + {file = "yarl-1.22.0-cp313-cp313-win_arm64.whl", hash = "sha256:5d0fcda9608875f7d052eff120c7a5da474a6796fe4d83e152e0e4d42f6d1a9b"}, + {file = "yarl-1.22.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:719ae08b6972befcba4310e49edb1161a88cdd331e3a694b84466bd938a6ab10"}, + {file = "yarl-1.22.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:47d8a5c446df1c4db9d21b49619ffdba90e77c89ec6e283f453856c74b50b9e3"}, + {file = "yarl-1.22.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:cfebc0ac8333520d2d0423cbbe43ae43c8838862ddb898f5ca68565e395516e9"}, + {file = "yarl-1.22.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4398557cbf484207df000309235979c79c4356518fd5c99158c7d38203c4da4f"}, + {file = "yarl-1.22.0-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:2ca6fd72a8cd803be290d42f2dec5cdcd5299eeb93c2d929bf060ad9efaf5de0"}, + {file = "yarl-1.22.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = 
"sha256:ca1f59c4e1ab6e72f0a23c13fca5430f889634166be85dbf1013683e49e3278e"}, + {file = "yarl-1.22.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:6c5010a52015e7c70f86eb967db0f37f3c8bd503a695a49f8d45700144667708"}, + {file = "yarl-1.22.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9d7672ecf7557476642c88497c2f8d8542f8e36596e928e9bcba0e42e1e7d71f"}, + {file = "yarl-1.22.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:3b7c88eeef021579d600e50363e0b6ee4f7f6f728cd3486b9d0f3ee7b946398d"}, + {file = "yarl-1.22.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:f4afb5c34f2c6fecdcc182dfcfc6af6cccf1aa923eed4d6a12e9d96904e1a0d8"}, + {file = "yarl-1.22.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:59c189e3e99a59cf8d83cbb31d4db02d66cda5a1a4374e8a012b51255341abf5"}, + {file = "yarl-1.22.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:5a3bf7f62a289fa90f1990422dc8dff5a458469ea71d1624585ec3a4c8d6960f"}, + {file = "yarl-1.22.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:de6b9a04c606978fdfe72666fa216ffcf2d1a9f6a381058d4378f8d7b1e5de62"}, + {file = "yarl-1.22.0-cp313-cp313t-win32.whl", hash = "sha256:1834bb90991cc2999f10f97f5f01317f99b143284766d197e43cd5b45eb18d03"}, + {file = "yarl-1.22.0-cp313-cp313t-win_amd64.whl", hash = "sha256:ff86011bd159a9d2dfc89c34cfd8aff12875980e3bd6a39ff097887520e60249"}, + {file = "yarl-1.22.0-cp313-cp313t-win_arm64.whl", hash = "sha256:7861058d0582b847bc4e3a4a4c46828a410bca738673f35a29ba3ca5db0b473b"}, + {file = "yarl-1.22.0-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:34b36c2c57124530884d89d50ed2c1478697ad7473efd59cfd479945c95650e4"}, + {file = "yarl-1.22.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:0dd9a702591ca2e543631c2a017e4a547e38a5c0f29eece37d9097e04a7ac683"}, + {file = "yarl-1.22.0-cp314-cp314-macosx_11_0_arm64.whl", hash = 
"sha256:594fcab1032e2d2cc3321bb2e51271e7cd2b516c7d9aee780ece81b07ff8244b"}, + {file = "yarl-1.22.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f3d7a87a78d46a2e3d5b72587ac14b4c16952dd0887dbb051451eceac774411e"}, + {file = "yarl-1.22.0-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:852863707010316c973162e703bddabec35e8757e67fcb8ad58829de1ebc8590"}, + {file = "yarl-1.22.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:131a085a53bfe839a477c0845acf21efc77457ba2bcf5899618136d64f3303a2"}, + {file = "yarl-1.22.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:078a8aefd263f4d4f923a9677b942b445a2be970ca24548a8102689a3a8ab8da"}, + {file = "yarl-1.22.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bca03b91c323036913993ff5c738d0842fc9c60c4648e5c8d98331526df89784"}, + {file = "yarl-1.22.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:68986a61557d37bb90d3051a45b91fa3d5c516d177dfc6dd6f2f436a07ff2b6b"}, + {file = "yarl-1.22.0-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:4792b262d585ff0dff6bcb787f8492e40698443ec982a3568c2096433660c694"}, + {file = "yarl-1.22.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:ebd4549b108d732dba1d4ace67614b9545b21ece30937a63a65dd34efa19732d"}, + {file = "yarl-1.22.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:f87ac53513d22240c7d59203f25cc3beac1e574c6cd681bbfd321987b69f95fd"}, + {file = "yarl-1.22.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:22b029f2881599e2f1b06f8f1db2ee63bd309e2293ba2d566e008ba12778b8da"}, + {file = "yarl-1.22.0-cp314-cp314-win32.whl", hash = "sha256:6a635ea45ba4ea8238463b4f7d0e721bad669f80878b7bfd1f89266e2ae63da2"}, + {file = "yarl-1.22.0-cp314-cp314-win_amd64.whl", hash = 
"sha256:0d6e6885777af0f110b0e5d7e5dda8b704efed3894da26220b7f3d887b839a79"}, + {file = "yarl-1.22.0-cp314-cp314-win_arm64.whl", hash = "sha256:8218f4e98d3c10d683584cb40f0424f4b9fd6e95610232dd75e13743b070ee33"}, + {file = "yarl-1.22.0-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:45c2842ff0e0d1b35a6bf1cd6c690939dacb617a70827f715232b2e0494d55d1"}, + {file = "yarl-1.22.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:d947071e6ebcf2e2bee8fce76e10faca8f7a14808ca36a910263acaacef08eca"}, + {file = "yarl-1.22.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:334b8721303e61b00019474cc103bdac3d7b1f65e91f0bfedeec2d56dfe74b53"}, + {file = "yarl-1.22.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1e7ce67c34138a058fd092f67d07a72b8e31ff0c9236e751957465a24b28910c"}, + {file = "yarl-1.22.0-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:d77e1b2c6d04711478cb1c4ab90db07f1609ccf06a287d5607fcd90dc9863acf"}, + {file = "yarl-1.22.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c4647674b6150d2cae088fc07de2738a84b8bcedebef29802cf0b0a82ab6face"}, + {file = "yarl-1.22.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:efb07073be061c8f79d03d04139a80ba33cbd390ca8f0297aae9cce6411e4c6b"}, + {file = "yarl-1.22.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e51ac5435758ba97ad69617e13233da53908beccc6cfcd6c34bbed8dcbede486"}, + {file = "yarl-1.22.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:33e32a0dd0c8205efa8e83d04fc9f19313772b78522d1bdc7d9aed706bfd6138"}, + {file = "yarl-1.22.0-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:bf4a21e58b9cde0e401e683ebd00f6ed30a06d14e93f7c8fd059f8b6e8f87b6a"}, + {file = "yarl-1.22.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = 
"sha256:e4b582bab49ac33c8deb97e058cd67c2c50dac0dd134874106d9c774fd272529"}, + {file = "yarl-1.22.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:0b5bcc1a9c4839e7e30b7b30dd47fe5e7e44fb7054ec29b5bb8d526aa1041093"}, + {file = "yarl-1.22.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:c0232bce2170103ec23c454e54a57008a9a72b5d1c3105dc2496750da8cfa47c"}, + {file = "yarl-1.22.0-cp314-cp314t-win32.whl", hash = "sha256:8009b3173bcd637be650922ac455946197d858b3630b6d8787aa9e5c4564533e"}, + {file = "yarl-1.22.0-cp314-cp314t-win_amd64.whl", hash = "sha256:9fb17ea16e972c63d25d4a97f016d235c78dd2344820eb35bc034bc32012ee27"}, + {file = "yarl-1.22.0-cp314-cp314t-win_arm64.whl", hash = "sha256:9f6d73c1436b934e3f01df1e1b21ff765cd1d28c77dfb9ace207f746d4610ee1"}, + {file = "yarl-1.22.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:3aa27acb6de7a23785d81557577491f6c38a5209a254d1191519d07d8fe51748"}, + {file = "yarl-1.22.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:af74f05666a5e531289cb1cc9c883d1de2088b8e5b4de48004e5ca8a830ac859"}, + {file = "yarl-1.22.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:62441e55958977b8167b2709c164c91a6363e25da322d87ae6dd9c6019ceecf9"}, + {file = "yarl-1.22.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b580e71cac3f8113d3135888770903eaf2f507e9421e5697d6ee6d8cd1c7f054"}, + {file = "yarl-1.22.0-cp39-cp39-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e81fda2fb4a07eda1a2252b216aa0df23ebcd4d584894e9612e80999a78fd95b"}, + {file = "yarl-1.22.0-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:99b6fc1d55782461b78221e95fc357b47ad98b041e8e20f47c1411d0aacddc60"}, + {file = "yarl-1.22.0-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:088e4e08f033db4be2ccd1f34cf29fe994772fb54cfe004bbf54db320af56890"}, + {file = 
"yarl-1.22.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2e4e1f6f0b4da23e61188676e3ed027ef0baa833a2e633c29ff8530800edccba"}, + {file = "yarl-1.22.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:84fc3ec96fce86ce5aa305eb4aa9358279d1aa644b71fab7b8ed33fe3ba1a7ca"}, + {file = "yarl-1.22.0-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:5dbeefd6ca588b33576a01b0ad58aa934bc1b41ef89dee505bf2932b22ddffba"}, + {file = "yarl-1.22.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:14291620375b1060613f4aab9ebf21850058b6b1b438f386cc814813d901c60b"}, + {file = "yarl-1.22.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:a4fcfc8eb2c34148c118dfa02e6427ca278bfd0f3df7c5f99e33d2c0e81eae3e"}, + {file = "yarl-1.22.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:029866bde8d7b0878b9c160e72305bbf0a7342bcd20b9999381704ae03308dc8"}, + {file = "yarl-1.22.0-cp39-cp39-win32.whl", hash = "sha256:4dcc74149ccc8bba31ce1944acee24813e93cfdee2acda3c172df844948ddf7b"}, + {file = "yarl-1.22.0-cp39-cp39-win_amd64.whl", hash = "sha256:10619d9fdee46d20edc49d3479e2f8269d0779f1b031e6f7c2aa1c76be04b7ed"}, + {file = "yarl-1.22.0-cp39-cp39-win_arm64.whl", hash = "sha256:dd7afd3f8b0bfb4e0d9fc3c31bfe8a4ec7debe124cfd90619305def3c8ca8cd2"}, + {file = "yarl-1.22.0-py3-none-any.whl", hash = "sha256:1380560bdba02b6b6c90de54133c81c9f2a453dee9912fe58c1dcced1edb7cff"}, + {file = "yarl-1.22.0.tar.gz", hash = "sha256:bebf8557577d4401ba8bd9ff33906f1376c877aa78d1fe216ad01b4d6745af71"}, +] + +[package.dependencies] +idna = ">=2.0" +multidict = ">=4.0" +propcache = ">=0.2.1" + +[metadata] +lock-version = "2.1" +python-versions = "^3.10" +content-hash = "d7b04712b1c27ffca3dbbf659d2b9e450363b9d7c335693787e6c8ea01434533" diff --git a/sdks/guides/python/pyproject.toml b/sdks/guides/python/pyproject.toml new file mode 100644 index 0000000000..6c39c0c9c6 --- /dev/null +++ b/sdks/guides/python/pyproject.toml @@ -0,0 +1,41 @@ +[tool.poetry] +name = 
"hatchet-guides-python" +version = "0.0.0" +description = "Hatchet guide examples (Python) - docs snippets with integration deps" +package-mode = false + +[tool.poetry.dependencies] +python = "^3.10" +hatchet-sdk = { path = "../../python", develop = true } +# LLM integrations +openai = "^1.0.0" +anthropic = "^0.39.0" +groq = "^0.9.0" +ollama = "^0.3.0" +# Embedding integrations +cohere = "^5.0.0" +# OCR integrations +pytesseract = "^0.3.10" +Pillow = "^10.0.0" +reductoai = "^0.16.0" +# Unstructured excluded - heavy deps (onnx) may not install on all platforms +# Scraper integrations +playwright = "^1.49.0" +firecrawl-py = "^0.0.16" +browserbase = "^1.4.0" + +[tool.poetry.group.dev.dependencies] +ruff = "^0.15.0" + +[build-system] +requires = ["poetry-core"] +build-backend = "poetry.core.masonry.api" + +[tool.ruff] +target-version = "py310" +exclude = ["integrations/ocr_unstructured.py"] # needs unstructured (heavy deps) +line-length = 100 + +[tool.ruff.lint] +select = ["E", "F", "I", "N", "W"] +ignore = ["E501"] # line length - doc examples can be long diff --git a/sdks/guides/python/rag_indexing/embedding_service.py b/sdks/guides/python/rag_indexing/embedding_service.py new file mode 100644 index 0000000000..c081d84e9a --- /dev/null +++ b/sdks/guides/python/rag_indexing/embedding_service.py @@ -0,0 +1,40 @@ +"""Encapsulated embedding service - swap MockEmbeddingService for OpenAI/Cohere in production. + +See docs: /guides/rag-and-indexing +""" + +from abc import ABC, abstractmethod + + +class EmbeddingService(ABC): + """Interface for text embeddings. 
Implement with OpenAI, Cohere, etc.""" + + @abstractmethod + def embed(self, text: str) -> list[float]: + """Convert text to embedding vector.""" + pass + + +class MockEmbeddingService(EmbeddingService): + """No external API - returns placeholder vectors for demos.""" + + def __init__(self, dim: int = 64) -> None: + self.dim = dim + + def embed(self, text: str) -> list[float]: + return [0.1] * self.dim + + +_embedding_service: EmbeddingService | None = None + + +def get_embedding_service() -> EmbeddingService: + global _embedding_service + if _embedding_service is None: + _embedding_service = MockEmbeddingService() + return _embedding_service + + +def set_embedding_service(service: EmbeddingService) -> None: + global _embedding_service + _embedding_service = service diff --git a/sdks/guides/python/rag_indexing/mock_embedding.py b/sdks/guides/python/rag_indexing/mock_embedding.py new file mode 100644 index 0000000000..1a22b0f560 --- /dev/null +++ b/sdks/guides/python/rag_indexing/mock_embedding.py @@ -0,0 +1,6 @@ +"""Mock embedding client - no external API dependencies.""" + + +def embed(text: str) -> list[float]: + """Mock: return fake embedding vector.""" + return [0.1] * 64 diff --git a/sdks/guides/python/rag_indexing/worker.py b/sdks/guides/python/rag_indexing/worker.py new file mode 100644 index 0000000000..d9d43f084f --- /dev/null +++ b/sdks/guides/python/rag_indexing/worker.py @@ -0,0 +1,81 @@ +from typing import Any + +from hatchet_sdk import Context, Hatchet +from pydantic import BaseModel + +try: + from .embedding_service import get_embedding_service +except ImportError: + from embedding_service import get_embedding_service + +hatchet = Hatchet(debug=True) + + +# > Step 01 Define Workflow +class DocInput(BaseModel): + doc_id: str + content: str + + +rag_wf = hatchet.workflow(name="RAGPipeline", input_validator=DocInput) +# !! 
+ + +# > Step 02 Define Ingest Task +@rag_wf.task() +async def ingest(input: DocInput, ctx: Context) -> dict[str, Any]: + return {"doc_id": input.doc_id, "content": input.content} + + +# !! + + +# > Step 03 Chunk Task +def _chunk_content(content: str, chunk_size: int = 100) -> list[str]: + return [content[i : i + chunk_size] for i in range(0, len(content), chunk_size)] +# !! + + +# > Step 04 Embed Task +@hatchet.task(name="embed-chunk") +async def embed_chunk(input: dict, ctx: Context) -> dict[str, Any]: + embedder = get_embedding_service() + return {"vector": embedder.embed(input["chunk"])} + + +@rag_wf.durable_task(parents=[ingest]) +async def chunk_and_embed(input: DocInput, ctx: Context) -> dict[str, Any]: + ingested = ctx.task_output(ingest) + chunks = [ingested["content"][i : i + 100] for i in range(0, len(ingested["content"]), 100)] + results = await embed_chunk.aio_run_many( + [embed_chunk.create_bulk_run_item(input={"chunk": c}) for c in chunks] + ) + return {"doc_id": ingested["doc_id"], "vectors": [r["vector"] for r in results]} + + +# !! + + +# > Step 05 Query Task +@hatchet.durable_task(name="rag-query") +async def query_task(input: dict, ctx: Context) -> dict[str, Any]: + result = await embed_chunk.aio_run(input={"chunk": input["query"]}) + # Replace with a real vector DB lookup in production + return {"query": input["query"], "vector": result["vector"], "results": []} + + +# !! + + +def main() -> None: + # > Step 06 Run Worker + worker = hatchet.worker( + "rag-worker", + workflows=[rag_wf, embed_chunk, query_task], + ) + worker.start() + # !! 
+
+
+if __name__ == "__main__":
+    main()
diff --git a/sdks/guides/python/routing/mock_classifier.py b/sdks/guides/python/routing/mock_classifier.py
new file mode 100644
index 0000000000..44c894fa6f
--- /dev/null
+++ b/sdks/guides/python/routing/mock_classifier.py
@@ -0,0 +1,18 @@
+"""Mock classifier - no external API dependencies."""
+
+
+def mock_classify(message: str) -> str:
+    lower = message.lower()
+    if any(w in lower for w in ("bug", "error", "help")):
+        return "support"
+    if any(w in lower for w in ("price", "buy", "plan")):
+        return "sales"
+    return "other"
+
+
+def mock_reply(message: str, role: str) -> str:
+    if role == "support":
+        return f"[Support] I can help with that technical issue. Let me look into: {message}"
+    if role == "sales":
+        return f"[Sales] Great question about pricing! Here's what I can tell you about: {message}"
+    return f"[General] Thanks for reaching out. Regarding: {message}"
diff --git a/sdks/guides/python/routing/worker.py b/sdks/guides/python/routing/worker.py
new file mode 100644
index 0000000000..8ffbd5c23c
--- /dev/null
+++ b/sdks/guides/python/routing/worker.py
@@ -0,0 +1,60 @@
+from hatchet_sdk import DurableContext, EmptyModel, Hatchet
+
+try:
+    from .mock_classifier import mock_classify, mock_reply
+except ImportError:
+    from mock_classifier import mock_classify, mock_reply
+
+hatchet = Hatchet(debug=True)
+
+
+# > Step 01 Classify Task
+@hatchet.durable_task(name="ClassifyMessage")
+async def classify_message(input: EmptyModel, ctx: DurableContext) -> dict:
+    return {"category": mock_classify(input.message)}
+# !!
+
+
+# > Step 02 Specialist Tasks
+@hatchet.durable_task(name="HandleSupport")
+async def handle_support(input: EmptyModel, ctx: DurableContext) -> dict:
+    return {"response": mock_reply(input.message, "support"), "category": "support"}
+
+
+@hatchet.durable_task(name="HandleSales")
+async def handle_sales(input: EmptyModel, ctx: DurableContext) -> dict:
+    return {"response": mock_reply(input.message, "sales"), "category": "sales"}
+
+
+@hatchet.durable_task(name="HandleDefault")
+async def handle_default(input: EmptyModel, ctx: DurableContext) -> dict:
+    return {"response": mock_reply(input.message, "other"), "category": "other"}
+# !!
+
+
+# > Step 03 Router Task
+@hatchet.durable_task(name="MessageRouter", execution_timeout="2m")
+async def message_router(input: EmptyModel, ctx: DurableContext) -> dict:
+    classification = await classify_message.aio_run({"message": input.message})
+
+    if classification["category"] == "support":
+        return await handle_support.aio_run({"message": input.message})
+    if classification["category"] == "sales":
+        return await handle_sales.aio_run({"message": input.message})
+    return await handle_default.aio_run({"message": input.message})
+# !!
+
+
+def main() -> None:
+    # > Step 04 Run Worker
+    worker = hatchet.worker(
+        "routing-worker",
+        workflows=[classify_message, handle_support, handle_sales, handle_default, message_router],
+        slots=5,
+    )
+    worker.start()
+    # !!
+
+
+if __name__ == "__main__":
+    main()
diff --git a/sdks/guides/python/scheduled_jobs/trigger.py b/sdks/guides/python/scheduled_jobs/trigger.py
new file mode 100644
index 0000000000..d7471f6b12
--- /dev/null
+++ b/sdks/guides/python/scheduled_jobs/trigger.py
@@ -0,0 +1,16 @@
+from datetime import datetime, timedelta, timezone
+
+from hatchet_sdk import Hatchet
+
+hatchet = Hatchet(debug=True)
+
+
+# > Step 02 Schedule One Time
+# Schedule a one-time run at a specific time.
+run_at = datetime.now(tz=timezone.utc) + timedelta(hours=1) +hatchet.scheduled.create( + workflow_name="ScheduledWorkflow", + trigger_at=run_at, + input={}, +) +# !! diff --git a/sdks/guides/python/scheduled_jobs/worker.py b/sdks/guides/python/scheduled_jobs/worker.py new file mode 100644 index 0000000000..a649e8ed68 --- /dev/null +++ b/sdks/guides/python/scheduled_jobs/worker.py @@ -0,0 +1,30 @@ +from hatchet_sdk import Context, EmptyModel, Hatchet + +hatchet = Hatchet(debug=True) + + +# > Step 01 Define Cron Task +cron_wf = hatchet.workflow(name="ScheduledWorkflow", on_crons=["0 * * * *"]) + + +@cron_wf.task() +def run_scheduled_job(input: EmptyModel, ctx: Context) -> dict: + """Runs every hour (minute 0).""" + return {"status": "completed", "job": "maintenance"} + + +# !! + + +def main() -> None: + # > Step 03 Run Worker + worker = hatchet.worker( + "scheduled-worker", + workflows=[cron_wf], + ) + worker.start() + # !! + + +if __name__ == "__main__": + main() diff --git a/sdks/guides/python/streaming/client.py b/sdks/guides/python/streaming/client.py new file mode 100644 index 0000000000..685fcaf028 --- /dev/null +++ b/sdks/guides/python/streaming/client.py @@ -0,0 +1,14 @@ +from hatchet_sdk import Hatchet + +hatchet = Hatchet(debug=True) + + +# > Step 03 Subscribe Client +# Client triggers the task and subscribes to the stream. +async def run_and_subscribe(): + run = await hatchet.runs.create(workflow_name="stream_task", input={}) + async for chunk in hatchet.runs.subscribe_to_stream(run.run_id): + print(chunk) + + +# !! 
diff --git a/sdks/guides/python/streaming/worker.py b/sdks/guides/python/streaming/worker.py new file mode 100644 index 0000000000..c070f2ddee --- /dev/null +++ b/sdks/guides/python/streaming/worker.py @@ -0,0 +1,52 @@ +import asyncio + +from hatchet_sdk import ( + ConcurrencyExpression, + ConcurrencyLimitStrategy, + Context, + EmptyModel, + Hatchet, +) + +hatchet = Hatchet(debug=True) + + +# > Step 01 Define Streaming Task +@hatchet.task( + concurrency=ConcurrencyExpression( + expression="'constant'", + max_runs=1, + limit_strategy=ConcurrencyLimitStrategy.CANCEL_IN_PROGRESS, + ) +) +async def stream_task(input: EmptyModel, ctx: Context) -> dict: + """Emit chunks to subscribers in real-time.""" + for i in range(5): + await ctx.aio_put_stream(f"chunk-{i}") + await asyncio.sleep(0.5) + return {"status": "done"} + + +# !! + + +# > Step 02 Emit Chunks +async def _emit_chunks(ctx: Context) -> None: + for i in range(5): + await ctx.aio_put_stream(f"chunk-{i}") + await asyncio.sleep(0.5) +# !! + + +def main() -> None: + # > Step 04 Run Worker + worker = hatchet.worker( + "streaming-worker", + workflows=[stream_task], + ) + worker.start() + # !! + + +if __name__ == "__main__": + main() diff --git a/sdks/guides/python/web_scraping/mock_scraper.py b/sdks/guides/python/web_scraping/mock_scraper.py new file mode 100644 index 0000000000..0ff89a3b9d --- /dev/null +++ b/sdks/guides/python/web_scraping/mock_scraper.py @@ -0,0 +1,19 @@ +"""Mock scraper - no external API dependencies.""" + +from datetime import datetime, timezone + + +def mock_scrape(url: str) -> dict: + return { + "url": url, + "title": f"Page: {url}", + "content": f"Mock scraped content from {url}. 
In production, use Firecrawl, Browserbase, or Playwright here.", + "scraped_at": datetime.now(timezone.utc).isoformat(), + } + + +def mock_extract(content: str) -> dict: + return { + "summary": content[:80], + "word_count": str(len(content.split())), + } diff --git a/sdks/guides/python/web_scraping/worker.py b/sdks/guides/python/web_scraping/worker.py new file mode 100644 index 0000000000..c4b8698dee --- /dev/null +++ b/sdks/guides/python/web_scraping/worker.py @@ -0,0 +1,86 @@ +import re + +from hatchet_sdk import Context, EmptyModel, Hatchet +from hatchet_sdk.rate_limit import RateLimit, RateLimitDuration + +try: + from .mock_scraper import mock_scrape +except ImportError: + from mock_scraper import mock_scrape + +hatchet = Hatchet(debug=True) + +scrape_wf = hatchet.workflow(name="ScrapeUrl") +process_wf = hatchet.workflow(name="ProcessContent") + + +# > Step 01 Define Scrape Task +@scrape_wf.task(execution_timeout="2m", retries=2) +async def scrape_url(input: dict, ctx: Context) -> dict: + return mock_scrape(input["url"]) +# !! + + +# > Step 02 Process Content +@process_wf.task() +async def process_content(input: dict, ctx: Context) -> dict: + content = input["content"] + links = re.findall(r"https?://[^\s<>\"']+", content) + summary = content[:200].strip() + word_count = len(content.split()) + return {"summary": summary, "word_count": word_count, "links": links} +# !! + + +# > Step 03 Cron Workflow +cron_wf = hatchet.workflow(name="WebScrapeWorkflow", on_crons=["0 */6 * * *"]) + + +@cron_wf.task() +async def scheduled_scrape(input: EmptyModel, ctx: Context) -> dict: + urls = [ + "https://example.com/pricing", + "https://example.com/blog", + "https://example.com/docs", + ] + + results = [] + for url in urls: + scraped = await scrape_wf.aio_run(input={"url": url}) + processed = await process_wf.aio_run(input={"url": url, "content": scraped["content"]}) + results.append({"url": url, **processed}) + return {"refreshed": len(results), "results": results} +# !! 
+ + +# > Step 04 Rate Limited Scrape +SCRAPE_RATE_LIMIT_KEY = "scrape-rate-limit" + +rate_limited_wf = hatchet.workflow(name="RateLimitedScrape") + + +@rate_limited_wf.task( + execution_timeout="2m", + retries=2, + rate_limits=[RateLimit(static_key=SCRAPE_RATE_LIMIT_KEY, units=1)], +) +async def rate_limited_scrape(input: dict, ctx: Context) -> dict: + return mock_scrape(input["url"]) +# !! + + +def main() -> None: + # > Step 05 Run Worker + hatchet.rate_limits.put(SCRAPE_RATE_LIMIT_KEY, 10, RateLimitDuration.MINUTE) + + worker = hatchet.worker( + "web-scraping-worker", + workflows=[scrape_wf, process_wf, cron_wf, rate_limited_wf], + slots=5, + ) + worker.start() + # !! + + +if __name__ == "__main__": + main() diff --git a/sdks/guides/python/webhook_processing/worker.py b/sdks/guides/python/webhook_processing/worker.py new file mode 100644 index 0000000000..a8ac4510fb --- /dev/null +++ b/sdks/guides/python/webhook_processing/worker.py @@ -0,0 +1,53 @@ +from hatchet_sdk import Context, Hatchet +from pydantic import BaseModel + +hatchet = Hatchet(debug=True) + + +# > Step 01 Define Webhook Task +class WebhookPayload(BaseModel): + event_id: str + type: str + data: dict + + +@hatchet.task( + input_validator=WebhookPayload, + on_events=["webhook:stripe", "webhook:github"], +) +def process_webhook(input: WebhookPayload, ctx: Context) -> dict: + """Process webhook payload. Hatchet acknowledges immediately, processes async.""" + return {"processed": input.event_id, "type": input.type} + + +# !! + + +# > Step 02 Register Webhook +def forward_webhook_to_hatchet(event_key: str, payload: dict) -> None: + """Call this from your webhook endpoint to trigger the task.""" + hatchet.event.push(event_key, payload) +# forward_webhook_to_hatchet("webhook:stripe", {"event_id": "evt_123", "type": "payment", "data": {...}}) +# !! 
+ + +# > Step 03 Process Payload +def _validate_and_process(input: WebhookPayload) -> dict: + if not input.event_id: + raise ValueError("event_id required for deduplication") + return {"processed": input.event_id, "type": input.type} +# !! + + +def main() -> None: + # > Step 04 Run Worker + worker = hatchet.worker( + "webhook-worker", + workflows=[process_webhook], + ) + worker.start() + # !! + + +if __name__ == "__main__": + main() diff --git a/sdks/guides/ruby/.rubocop.yml b/sdks/guides/ruby/.rubocop.yml new file mode 100644 index 0000000000..3823db9538 --- /dev/null +++ b/sdks/guides/ruby/.rubocop.yml @@ -0,0 +1,16 @@ +# Minimal config for guide examples - doc snippets, not production code +AllCops: + NewCops: enable + TargetRubyVersion: 3.1 + +Style/Documentation: + Enabled: false + +Metrics/MethodLength: + Max: 30 + +Metrics/BlockLength: + Max: 25 + +Layout/LineLength: + Max: 140 diff --git a/sdks/guides/ruby/Gemfile b/sdks/guides/ruby/Gemfile new file mode 100644 index 0000000000..a016d54cca --- /dev/null +++ b/sdks/guides/ruby/Gemfile @@ -0,0 +1,14 @@ +# frozen_string_literal: true + +# Hatchet guide examples (Ruby) - docs snippets with integration deps +source 'https://rubygems.org' + +gem 'hatchet-sdk', path: '../../ruby/src' +# LLM / embedding integrations +gem 'openai' +# OCR integration (requires Tesseract binary) +gem 'rtesseract' + +group :development do + gem 'rubocop', '~> 1.21' +end diff --git a/sdks/guides/ruby/Gemfile.lock b/sdks/guides/ruby/Gemfile.lock new file mode 100644 index 0000000000..13217c545b --- /dev/null +++ b/sdks/guides/ruby/Gemfile.lock @@ -0,0 +1,99 @@ +PATH + remote: ../../ruby/src + specs: + hatchet-sdk (0.1.1) + concurrent-ruby (>= 1.1) + faraday (~> 2.0) + faraday-multipart + google-protobuf (~> 4.0) + grpc (~> 1.60) + json (~> 2.0) + marcel + +GEM + remote: https://rubygems.org/ + specs: + addressable (2.8.9) + public_suffix (>= 2.0.2, < 8.0) + ast (2.4.3) + base64 (0.3.0) + bigdecimal (4.0.1) + cgi (0.5.1) + concurrent-ruby 
(1.3.6) + connection_pool (3.0.2) + faraday (2.14.1) + faraday-net_http (>= 2.0, < 3.5) + json + logger + faraday-multipart (1.2.0) + multipart-post (~> 2.0) + faraday-net_http (3.4.2) + net-http (~> 0.5) + google-protobuf (4.34.0-arm64-darwin) + bigdecimal + rake (~> 13.3) + googleapis-common-protos-types (1.22.0) + google-protobuf (~> 4.26) + grpc (1.78.1-arm64-darwin) + google-protobuf (>= 3.25, < 5.0) + googleapis-common-protos-types (~> 1.0) + json (2.18.1) + json-schema (6.1.0) + addressable (~> 2.8) + bigdecimal (>= 3.1, < 5) + language_server-protocol (3.17.0.5) + lint_roller (1.1.0) + logger (1.7.0) + marcel (1.1.0) + mcp (0.7.1) + json-schema (>= 4.1) + multipart-post (2.4.1) + net-http (0.9.1) + uri (>= 0.11.1) + openai (0.51.0) + base64 + cgi + connection_pool + parallel (1.27.0) + parser (3.3.10.2) + ast (~> 2.4.1) + racc + prism (1.9.0) + public_suffix (7.0.2) + racc (1.8.1) + rainbow (3.1.1) + rake (13.3.1) + regexp_parser (2.11.3) + rtesseract (3.1.4) + rubocop (1.85.0) + json (~> 2.3) + language_server-protocol (~> 3.17.0.2) + lint_roller (~> 1.1.0) + mcp (~> 0.6) + parallel (~> 1.10) + parser (>= 3.3.0.2) + rainbow (>= 2.2.2, < 4.0) + regexp_parser (>= 2.9.3, < 3.0) + rubocop-ast (>= 1.49.0, < 2.0) + ruby-progressbar (~> 1.7) + unicode-display_width (>= 2.4.0, < 4.0) + rubocop-ast (1.49.0) + parser (>= 3.3.7.2) + prism (~> 1.7) + ruby-progressbar (1.13.0) + unicode-display_width (3.2.0) + unicode-emoji (~> 4.1) + unicode-emoji (4.2.0) + uri (1.1.1) + +PLATFORMS + arm64-darwin-24 + +DEPENDENCIES + hatchet-sdk! 
+ openai + rtesseract + rubocop (~> 1.21) + +BUNDLED WITH + 2.6.9 diff --git a/sdks/guides/ruby/ai_agents/mock_agent.rb b/sdks/guides/ruby/ai_agents/mock_agent.rb new file mode 100644 index 0000000000..75a24b39c5 --- /dev/null +++ b/sdks/guides/ruby/ai_agents/mock_agent.rb @@ -0,0 +1,23 @@ +# frozen_string_literal: true + +# Mock LLM and tools - no external API dependencies +@llm_call_count = 0 + +def call_llm(_messages) + @llm_call_count += 1 + if @llm_call_count == 1 + { 'content' => '', 'tool_calls' => [{ 'name' => 'get_weather', 'args' => { 'location' => 'SF' } }], + 'done' => false } + else + { 'content' => "It's 72°F and sunny in SF.", 'tool_calls' => [], 'done' => true } + end +end + +def run_tool(name, args) + if name == 'get_weather' + loc = args['location'] || 'unknown' + "Weather in #{loc}: 72°F, sunny" + else + "Unknown tool: #{name}" + end +end diff --git a/sdks/guides/ruby/ai_agents/worker.rb b/sdks/guides/ruby/ai_agents/worker.rb new file mode 100644 index 0000000000..6a14185420 --- /dev/null +++ b/sdks/guides/ruby/ai_agents/worker.rb @@ -0,0 +1,46 @@ +# frozen_string_literal: true + +require 'hatchet-sdk' +require_relative 'mock_agent' + +HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET) + +# > Step 02 Reasoning Loop +def agent_reasoning_loop(query) + messages = [{ 'role' => 'user', 'content' => query }] + 10.times do + resp = call_llm(messages) + return { 'response' => resp['content'] } if resp['done'] + + (resp['tool_calls'] || []).each do |tc| + result = run_tool(tc['name'], tc['args'] || {}) + messages << { 'role' => 'tool', 'content' => result } + end + end + { 'response' => 'Max iterations reached' } +end +# !! + +# > Step 01 Define Agent Task +AGENT_TASK = HATCHET.durable_task(name: 'ReasoningLoopAgent') do |input, _ctx| + query = input.is_a?(Hash) && input['query'] ? input['query'].to_s : 'Hello' + agent_reasoning_loop(query) +end +# !! 
+
+# > Step 03 Stream Response
+STREAMING_AGENT = HATCHET.durable_task(name: 'StreamingAgentTask') do |_input, ctx|
+  ['Hello', ' ', 'world', '!'].each { |t| ctx.put_stream(t) }
+  { 'done' => true }
+end
+
+# !!
+
+def main
+  # > Step 04 Run Worker
+  worker = HATCHET.worker('agent-worker', slots: 5, workflows: [AGENT_TASK, STREAMING_AGENT])
+  worker.start
+  # !!
+end
+
+main if __FILE__ == $PROGRAM_NAME
diff --git a/sdks/guides/ruby/batch_processing/worker.rb b/sdks/guides/ruby/batch_processing/worker.rb
new file mode 100644
index 0000000000..e4c9a930a3
--- /dev/null
+++ b/sdks/guides/ruby/batch_processing/worker.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+require 'hatchet-sdk'
+
+HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET)
+
+# > Step 01 Define Parent Task
+BATCH_PARENT_WF = HATCHET.workflow(name: 'BatchParent')
+BATCH_CHILD_WF = HATCHET.workflow(name: 'BatchChild')
+
+BATCH_PARENT_WF.durable_task(:spawn_children) do |input, _ctx|
+  items = input['items'] || []
+  results = BATCH_CHILD_WF.run_many(
+    items.map { |item_id| BATCH_CHILD_WF.create_bulk_run_item(input: { 'item_id' => item_id }) }
+  )
+  { 'processed' => results.size, 'results' => results }
+end
+
+# !!
+
+# > Step 03 Process Item
+BATCH_CHILD_WF.task(:process_item) do |input, _ctx|
+  { 'status' => 'done', 'item_id' => input['item_id'] }
+end
+
+# !!
+
+def main
+  # > Step 04 Run Worker
+  worker = HATCHET.worker('batch-worker', slots: 20, workflows: [BATCH_PARENT_WF, BATCH_CHILD_WF])
+  worker.start
+  # !!
+end + +main if __FILE__ == $PROGRAM_NAME diff --git a/sdks/guides/ruby/document_processing/mock_ocr.rb b/sdks/guides/ruby/document_processing/mock_ocr.rb new file mode 100644 index 0000000000..3416cfc3b0 --- /dev/null +++ b/sdks/guides/ruby/document_processing/mock_ocr.rb @@ -0,0 +1,6 @@ +# frozen_string_literal: true + +# Mock OCR - no external dependencies +def parse_document(content) + "Parsed text from #{content.size} bytes" +end diff --git a/sdks/guides/ruby/document_processing/worker.rb b/sdks/guides/ruby/document_processing/worker.rb new file mode 100644 index 0000000000..319d2cb045 --- /dev/null +++ b/sdks/guides/ruby/document_processing/worker.rb @@ -0,0 +1,41 @@ +# frozen_string_literal: true + +require 'hatchet-sdk' +require_relative 'mock_ocr' + +HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET) + +# > Step 01 Define DAG +DOC_WF = HATCHET.workflow(name: 'DocumentPipeline') + +INGEST = DOC_WF.task(:ingest) do |input, _ctx| + { 'doc_id' => input['doc_id'], 'content' => input['content'] } +end + +# !! + +# > Step 02 Parse Stage +PARSE = DOC_WF.task(:parse, parents: [INGEST]) do |input, ctx| + ingested = ctx.task_output(INGEST) + text = parse_document(ingested['content']) + { 'doc_id' => input['doc_id'], 'text' => text } +end + +# !! + +# > Step 03 Extract Stage +DOC_WF.task(:extract, parents: [PARSE]) do |_input, ctx| + parsed = ctx.task_output(PARSE) + { 'doc_id' => parsed['doc_id'], 'entities' => %w[entity1 entity2] } +end + +# !! + +def main + # > Step 04 Run Worker + worker = HATCHET.worker('document-worker', workflows: [DOC_WF]) + worker.start + # !! 
+end + +main if __FILE__ == $PROGRAM_NAME diff --git a/sdks/guides/ruby/evaluator_optimizer/mock_llm.rb b/sdks/guides/ruby/evaluator_optimizer/mock_llm.rb new file mode 100644 index 0000000000..3c3c37781f --- /dev/null +++ b/sdks/guides/ruby/evaluator_optimizer/mock_llm.rb @@ -0,0 +1,20 @@ +# frozen_string_literal: true + +@generate_count = 0 + +def mock_generate(_prompt) + @generate_count += 1 + if @generate_count == 1 + 'Check out our product! Buy now!' + else + 'Discover how our tool saves teams 10 hours/week. Try it free.' + end +end + +def mock_evaluate(draft) + if draft.length < 40 + { 'score' => 0.4, 'feedback' => 'Too short and pushy. Add a specific benefit and soften the CTA.' } + else + { 'score' => 0.9, 'feedback' => 'Clear value prop, appropriate tone.' } + end +end diff --git a/sdks/guides/ruby/evaluator_optimizer/worker.rb b/sdks/guides/ruby/evaluator_optimizer/worker.rb new file mode 100644 index 0000000000..64493188c7 --- /dev/null +++ b/sdks/guides/ruby/evaluator_optimizer/worker.rb @@ -0,0 +1,62 @@ +# frozen_string_literal: true + +require 'hatchet-sdk' +require_relative 'mock_llm' + +HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET) + +GENERATOR_WF = HATCHET.workflow(name: 'GenerateDraft') +EVALUATOR_WF = HATCHET.workflow(name: 'EvaluateDraft') + +# > Step 01 Define Tasks +GENERATOR_WF.task(:generate_draft) do |input, _ctx| + prompt = if input['feedback'] + "Improve this draft.\n\nDraft: #{input['previous_draft']}\nFeedback: #{input['feedback']}" + else + "Write a social media post about \"#{input['topic']}\" for #{input['audience']}. Under 100 words." + end + { 'draft' => mock_generate(prompt) } +end + +EVALUATOR_WF.task(:evaluate_draft) do |input, _ctx| + mock_evaluate(input['draft']) +end +# !! 
+
+# > Step 02 Optimization Loop
+OPTIMIZER_TASK = HATCHET.durable_task(name: 'EvaluatorOptimizer', execution_timeout: '5m') do |input, _ctx|
+  max_iterations = 3
+  threshold = 0.8
+  draft = ''
+  feedback = ''
+  result = nil
+  max_iterations.times do |i|
+    generated = GENERATOR_WF.run(
+      'topic' => input['topic'], 'audience' => input['audience'],
+      'previous_draft' => draft.empty? ? nil : draft,
+      'feedback' => feedback.empty? ? nil : feedback
+    )
+    draft = generated['draft']
+
+    evaluation = EVALUATOR_WF.run(
+      'draft' => draft, 'topic' => input['topic'], 'audience' => input['audience']
+    )
+
+    break result = { 'draft' => draft, 'iterations' => i + 1, 'score' => evaluation['score'] } if evaluation['score'] >= threshold
+
+    feedback = evaluation['feedback']
+  end
+
+  result || { 'draft' => draft, 'iterations' => max_iterations, 'score' => -1 }
+end
+# !!
+
+def main
+  # > Step 03 Run Worker
+  worker = HATCHET.worker('evaluator-optimizer-worker', slots: 5,
+                          workflows: [GENERATOR_WF, EVALUATOR_WF, OPTIMIZER_TASK])
+  worker.start
+  # !!
+end
+
+main if __FILE__ == $PROGRAM_NAME
diff --git a/sdks/guides/ruby/event_driven/trigger.rb b/sdks/guides/ruby/event_driven/trigger.rb
new file mode 100644
index 0000000000..6df423b98e
--- /dev/null
+++ b/sdks/guides/ruby/event_driven/trigger.rb
@@ -0,0 +1,10 @@
+# frozen_string_literal: true
+
+require 'hatchet-sdk'
+
+HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET)
+
+# > Step 03 Push Event
+# Push an event to trigger the workflow. Use the same key as on_events.
+HATCHET.event.push('order:created', 'message' => 'Order #1234', 'source' => 'webhook')
+# !!
diff --git a/sdks/guides/ruby/event_driven/worker.rb b/sdks/guides/ruby/event_driven/worker.rb
new file mode 100644
index 0000000000..4c4ad26b74
--- /dev/null
+++ b/sdks/guides/ruby/event_driven/worker.rb
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+require 'hatchet-sdk'
+
+HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET)
+
+# > Step 01 Define Event Task
+EVENT_WF = HATCHET.workflow(name: 'EventDrivenWorkflow', on_events: ['order:created', 'user:signup'])
+
+EVENT_WF.task(:process_event) do |input, _ctx|
+  { 'processed' => input['message'], 'source' => input['source'] || 'api' }
+end
+
+# !!
+
+# > Step 02 Register Event Trigger
+# Push an event from your app to trigger the workflow. Use the same key as on_events.
+def push_order_event
+  HATCHET.event.push('order:created', 'message' => 'Order #1234', 'source' => 'webhook')
+end
+# !!
+
+def main
+  # > Step 04 Run Worker
+  worker = HATCHET.worker('event-driven-worker', workflows: [EVENT_WF])
+  worker.start
+  # !!
+end
+
+main if __FILE__ == $PROGRAM_NAME
diff --git a/sdks/guides/ruby/human_in_the_loop/trigger.rb b/sdks/guides/ruby/human_in_the_loop/trigger.rb
new file mode 100644
index 0000000000..e6648ffe35
--- /dev/null
+++ b/sdks/guides/ruby/human_in_the_loop/trigger.rb
@@ -0,0 +1,18 @@
+# frozen_string_literal: true
+
+require 'hatchet-sdk'
+
+HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET)
+
+# > Step 03 Push Approval Event
+# Include the run_id so the event matches the specific task waiting for it.
+def push_approval(run_id:, approved:, reason: '')
+  HATCHET.event.push(
+    'approval:decision',
+    'runId' => run_id, 'approved' => approved, 'reason' => reason
+  )
+end
+
+# Approve: push_approval(run_id: 'run-id-from-ui', approved: true)
+# Reject: push_approval(run_id: 'run-id-from-ui', approved: false, reason: "needs review")
+# !!
diff --git a/sdks/guides/ruby/human_in_the_loop/worker.rb b/sdks/guides/ruby/human_in_the_loop/worker.rb new file mode 100644 index 0000000000..d4b41b78e3 --- /dev/null +++ b/sdks/guides/ruby/human_in_the_loop/worker.rb @@ -0,0 +1,44 @@ +# frozen_string_literal: true + +require 'hatchet-sdk' + +HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET) + +APPROVAL_EVENT_KEY = 'approval:decision' + +# > Step 02 Wait For Event +def wait_for_approval(ctx) + run_id = ctx.workflow_run_id + ctx.wait_for( + 'approval', + Hatchet::UserEventCondition.new( + event_key: APPROVAL_EVENT_KEY, + expression: "input.runId == '#{run_id}'" + ) + ) +end +# !! + +# > Step 01 Define Approval Task +APPROVAL_TASK = HATCHET.durable_task(name: 'ApprovalTask') do |_input, ctx| + proposed_action = { 'action' => 'send_email', 'to' => 'user@example.com' } + approval = wait_for_approval(ctx) + if approval['approved'] + { 'status' => 'approved', 'action' => proposed_action } + else + { 'status' => 'rejected', 'reason' => approval['reason'].to_s } + end +end +# !! + +def main + # > Step 04 Run Worker + worker = HATCHET.worker( + 'human-in-the-loop-worker', + workflows: [APPROVAL_TASK] + ) + worker.start + # !! +end + +main if __FILE__ == $PROGRAM_NAME diff --git a/sdks/guides/ruby/integrations/embedding_openai.rb b/sdks/guides/ruby/integrations/embedding_openai.rb new file mode 100644 index 0000000000..7d4aeb687c --- /dev/null +++ b/sdks/guides/ruby/integrations/embedding_openai.rb @@ -0,0 +1,15 @@ +# frozen_string_literal: true + +# Third-party integration - requires: bundle add openai +# See: /guides/rag-and-indexing + +require 'openai' + +OpenAI::Client.new + +# > OpenAI embedding usage +def embed(text) + response = client.embeddings(parameters: { model: 'text-embedding-3-small', input: text }) + response.dig('data', 0, 'embedding') || [] +end +# !! 
diff --git a/sdks/guides/ruby/integrations/llm_openai.rb b/sdks/guides/ruby/integrations/llm_openai.rb new file mode 100644 index 0000000000..7449c47d6b --- /dev/null +++ b/sdks/guides/ruby/integrations/llm_openai.rb @@ -0,0 +1,34 @@ +# frozen_string_literal: true + +# Third-party integration - requires: bundle add openai +# See: /guides/ai-agents + +require 'openai' +require 'json' + +OpenAI::Client.new + +# > OpenAI usage +def complete(messages) + response = client.chat( + parameters: { + model: 'gpt-4o-mini', + messages: messages, + tool_choice: 'auto', + tools: [{ + type: 'function', + function: { + name: 'get_weather', + description: 'Get weather for a location', + parameters: { type: 'object', properties: { location: { type: 'string' } }, required: ['location'] } + } + }] + } + ) + msg = response.dig('choices', 0, 'message') + tool_calls = msg['tool_calls']&.map do |tc| + { 'name' => tc.dig('function', 'name'), 'args' => JSON.parse(tc.dig('function', 'arguments') || '{}') } + end || [] + { 'content' => msg['content'] || '', 'tool_calls' => tool_calls, 'done' => tool_calls.empty? } +end +# !! diff --git a/sdks/guides/ruby/integrations/ocr_tesseract.rb b/sdks/guides/ruby/integrations/ocr_tesseract.rb new file mode 100644 index 0000000000..c889fa548c --- /dev/null +++ b/sdks/guides/ruby/integrations/ocr_tesseract.rb @@ -0,0 +1,12 @@ +# frozen_string_literal: true + +# Third-party integration - requires: bundle add rtesseract; install Tesseract binary +# See: /guides/document-processing + +require 'rtesseract' + +# > Tesseract usage +def parse_document(content) + RTesseract.new(nil, data: content).to_s +end +# !! 
diff --git a/sdks/guides/ruby/llm_pipelines/mock_llm.rb b/sdks/guides/ruby/llm_pipelines/mock_llm.rb new file mode 100644 index 0000000000..d9b7e364a8 --- /dev/null +++ b/sdks/guides/ruby/llm_pipelines/mock_llm.rb @@ -0,0 +1,6 @@ +# frozen_string_literal: true + +# Mock LLM - no external API dependencies +def generate(prompt) + { 'content' => "Generated for: #{prompt[0, 50]}...", 'valid' => true } +end diff --git a/sdks/guides/ruby/llm_pipelines/worker.rb b/sdks/guides/ruby/llm_pipelines/worker.rb new file mode 100644 index 0000000000..99586613ea --- /dev/null +++ b/sdks/guides/ruby/llm_pipelines/worker.rb @@ -0,0 +1,42 @@ +# frozen_string_literal: true + +require 'hatchet-sdk' +require_relative 'mock_llm' + +HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET) + +# > Step 01 Define Pipeline +LLM_WF = HATCHET.workflow(name: 'LLMPipeline') + +PROMPT_TASK = LLM_WF.task(:prompt_task) do |input, _ctx| + { 'prompt' => input['prompt'] } +end + +# !! + +# > Step 02 Prompt Task +def build_prompt(user_input, context = '') + base = "Process the following: #{user_input}" + context.empty? ? base : "#{base}\nContext: #{context}" +end +# !! + +# > Step 03 Validate Task +LLM_WF.task(:generate_task, parents: [PROMPT_TASK]) do |_input, ctx| + prev = ctx.task_output(PROMPT_TASK) + output = generate(prev['prompt']) + raise 'Validation failed' unless output['valid'] + + output +end + +# !! + +def main + # > Step 04 Run Worker + worker = HATCHET.worker('llm-pipeline-worker', workflows: [LLM_WF]) + worker.start + # !! 
+end + +main if __FILE__ == $PROGRAM_NAME diff --git a/sdks/guides/ruby/multi_agent/mock_llm.rb b/sdks/guides/ruby/multi_agent/mock_llm.rb new file mode 100644 index 0000000000..6d60c08d85 --- /dev/null +++ b/sdks/guides/ruby/multi_agent/mock_llm.rb @@ -0,0 +1,21 @@ +# frozen_string_literal: true + +@orchestrator_call_count = 0 + +def mock_orchestrator_llm(_messages) + @orchestrator_call_count += 1 + case @orchestrator_call_count + when 1 + { 'done' => false, 'content' => '', + 'tool_call' => { 'name' => 'research', 'args' => { 'task' => 'Find key facts about the topic' } } } + when 2 + { 'done' => false, 'content' => '', + 'tool_call' => { 'name' => 'writing', 'args' => { 'task' => 'Write a summary from the research' } } } + else + { 'done' => true, 'content' => 'Here is the final report combining research and writing.' } + end +end + +def mock_specialist_llm(task, role) + "[#{role}] Completed: #{task}" +end diff --git a/sdks/guides/ruby/multi_agent/worker.rb b/sdks/guides/ruby/multi_agent/worker.rb new file mode 100644 index 0000000000..1f9e3f7b1f --- /dev/null +++ b/sdks/guides/ruby/multi_agent/worker.rb @@ -0,0 +1,64 @@ +# frozen_string_literal: true + +require 'hatchet-sdk' +require_relative 'mock_llm' + +HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET) + +# > Step 01 Specialist Agents +RESEARCH_TASK = HATCHET.durable_task(name: 'ResearchSpecialist', execution_timeout: '3m') do |input, _ctx| + { 'result' => mock_specialist_llm(input['task'], 'research') } +end + +WRITING_TASK = HATCHET.durable_task(name: 'WritingSpecialist', execution_timeout: '2m') do |input, _ctx| + { 'result' => mock_specialist_llm(input['task'], 'writing') } +end + +CODE_TASK = HATCHET.durable_task(name: 'CodeSpecialist', execution_timeout: '2m') do |input, _ctx| + { 'result' => mock_specialist_llm(input['task'], 'code') } +end +# !! 
+ +SPECIALISTS = { + 'research' => RESEARCH_TASK, + 'writing' => WRITING_TASK, + 'code' => CODE_TASK +}.freeze + +# > Step 02 Orchestrator Loop +ORCHESTRATOR = HATCHET.durable_task(name: 'MultiAgentOrchestrator', execution_timeout: '15m') do |input, _ctx| + messages = [{ 'role' => 'user', 'content' => input['goal'] }] + + result = nil + 10.times do + response = mock_orchestrator_llm(messages) + + if response['done'] + result = { 'result' => response['content'] } + break + end + + specialist = SPECIALISTS[response['tool_call']['name']] + raise "Unknown specialist: #{response['tool_call']['name']}" unless specialist + + specialist_result = specialist.run( + 'task' => response['tool_call']['args']['task'], + 'context' => messages.map { |m| m['content'] }.join("\n") + ) + + messages << { 'role' => 'assistant', 'content' => "Called #{response['tool_call']['name']}" } + messages << { 'role' => 'tool', 'content' => specialist_result['result'] } + end + + result || { 'result' => 'Max iterations reached' } +end +# !! + +def main + # > Step 03 Run Worker + worker = HATCHET.worker('multi-agent-worker', slots: 10, workflows: [RESEARCH_TASK, WRITING_TASK, CODE_TASK, ORCHESTRATOR]) + worker.start + # !! +end + +main if __FILE__ == $PROGRAM_NAME diff --git a/sdks/guides/ruby/parallelization/mock_llm.rb b/sdks/guides/ruby/parallelization/mock_llm.rb new file mode 100644 index 0000000000..58b8b71301 --- /dev/null +++ b/sdks/guides/ruby/parallelization/mock_llm.rb @@ -0,0 +1,18 @@ +# frozen_string_literal: true + +def mock_generate_content(message) + "Here is a helpful response to: #{message}" +end + +def mock_safety_check(message) + if message.downcase.include?('unsafe') + { 'safe' => false, 'reason' => 'Content flagged as potentially unsafe.' } + else + { 'safe' => true, 'reason' => 'Content is appropriate.' } + end +end + +def mock_evaluate_content(content) + score = content.length > 20 ? 
# > Step 03 Voting
# Fan out three evaluator child runs in parallel and take a majority vote.
VOTING_TASK = HATCHET.durable_task(name: 'ParallelVoting', execution_timeout: '3m') do |input, _ctx|
  # One thread per evaluator run; Thread#value joins and returns the result.
  threads = 3.times.map { Thread.new { EVALUATOR_WF.run('content' => input['content']) } }
  votes = threads.map(&:value)

  approvals = votes.count { |v| v['approved'] }
  # .to_f guards against integer division when averaging scores.
  avg_score = votes.sum { |v| v['score'] } / votes.size.to_f

  # Majority rule: at least 2 of the 3 evaluators must approve.
  { 'approved' => approvals >= 2, 'average_score' => avg_score, 'votes' => votes.size }
end
# !!
# Chunk the ingested content and embed every chunk via bulk child runs.
# Durable so the fan-out result is preserved across worker restarts.
RAG_WF.durable_task(:chunk_and_embed, parents: [INGEST]) do |_input, ctx|
  ingested = ctx.task_output(INGEST)
  content = ingested['content']
  # Reuse the chunk_content helper (Step 03) instead of duplicating the
  # 100-char scan inline, so the chunk size lives in exactly one place.
  chunks = chunk_content(content)
  results = EMBED_CHUNK_TASK.run_many(
    chunks.map { |c| EMBED_CHUNK_TASK.create_bulk_run_item(input: { 'chunk' => c }) }
  )
  { 'doc_id' => ingested['doc_id'], 'vectors' => results.map { |r| r['vector'] } }
end
# Keyword-based stand-in for an LLM classifier: buckets a message into
# 'support', 'sales', or 'other'.
def mock_classify(message)
  text = message.downcase
  if %w[bug error help].any? { |kw| text.include?(kw) }
    'support'
  elsif %w[price buy plan].any? { |kw| text.include?(kw) }
    'sales'
  else
    'other'
  end
end

# Canned reply for the given specialist role; unknown roles fall back to
# the general-purpose response.
def mock_reply(message, role)
  prefix =
    case role
    when 'support' then '[Support] I can help with that technical issue. Let me look into: '
    when 'sales' then "[Sales] Great question about pricing! Here's what I can tell you about: "
    else '[General] Thanks for reaching out. Regarding: '
    end
  "#{prefix}#{message}"
end
# > Step 03 Router Task
# Classify the message once, then hand off to exactly one specialist task.
# Runs as a durable task so completed child-run results survive restarts.
ROUTER_TASK = HATCHET.durable_task(name: 'MessageRouter', execution_timeout: '2m') do |input, _ctx|
  # Child run: blocks until classification completes.
  classification = CLASSIFY_TASK.run('message' => input['message'])

  # Route on the classifier's category; anything unrecognized falls
  # through to the general handler.
  case classification['category']
  when 'support'
    SUPPORT_TASK.run('message' => input['message'])
  when 'sales'
    SALES_TASK.run('message' => input['message'])
  else
    DEFAULT_TASK.run('message' => input['message'])
  end
end
# !!
# > Step 01 Define Streaming Task
# Emits five chunks to the run's stream, then returns a final status.
# Clients subscribed to the run (see client.rb) receive each chunk live.
STREAM_TASK = HATCHET.task(name: 'stream-example') do |_input, ctx|
  5.times do |i|
    ctx.put_stream("chunk-#{i}")
    sleep 0.5 # simulate incremental work between chunks
  end
  { 'status' => 'done' }
end

# !!
# Fake scrape result for guide demos; swap in Firecrawl, Browserbase, or
# Playwright for real scraping in production.
def mock_scrape(url)
  result = { 'url' => url }
  result['title'] = "Page: #{url}"
  result['content'] = "Mock scraped content from #{url}. In production, use Firecrawl, Browserbase, or Playwright here."
  result['scraped_at'] = Time.now.utc.iso8601
  result
end

# Fake extraction: 80-char summary plus a word count (kept as a string).
def mock_extract(content)
  words = content.split
  { 'summary' => content[0, 80], 'word_count' => words.size.to_s }
end
# > Step 03 Cron Workflow
# Refresh a fixed URL list on a cron schedule (every six hours).
CRON_WF = HATCHET.workflow(name: 'WebScrapeWorkflow', on_crons: ['0 */6 * * *'])

CRON_WF.task(:scheduled_scrape) do |_input, _ctx|
  urls = %w[
    https://example.com/pricing
    https://example.com/blog
    https://example.com/docs
  ]

  # Sequential child runs: scrape each URL, then post-process its content.
  results = urls.map do |url|
    scraped = SCRAPE_WF.run('url' => url)
    processed = PROCESS_WF.run('url' => url, 'content' => scraped['content'])
    { 'url' => url }.merge(processed)
  end
  { 'refreshed' => results.size, 'results' => results }
end
# !!
+ +# > Step 02 Register Webhook +def forward_webhook(event_key, payload) + HATCHET.event.push(event_key, payload) +end +# forward_webhook("webhook:stripe", { "event_id" => "evt_123", "type" => "payment", "data" => {} }) +# !! + +# > Step 03 Process Payload +def validate_and_process(input) + raise 'event_id required for deduplication' if input['event_id'].to_s.empty? + + { 'processed' => input['event_id'], 'type' => input['type'] } +end +# !! + +def main + # > Step 04 Run Worker + worker = HATCHET.worker('webhook-worker', workflows: [PROCESS_WEBHOOK]) + worker.start + # !! +end + +main if __FILE__ == $PROGRAM_NAME diff --git a/sdks/guides/typescript/.eslintrc.json b/sdks/guides/typescript/.eslintrc.json new file mode 100644 index 0000000000..148879ef48 --- /dev/null +++ b/sdks/guides/typescript/.eslintrc.json @@ -0,0 +1,22 @@ +{ + "root": true, + "extends": ["airbnb-base", "prettier"], + "parser": "@typescript-eslint/parser", + "parserOptions": { "ecmaVersion": 2020, "sourceType": "module" }, + "plugins": ["@typescript-eslint"], + "env": { "node": true }, + "rules": { + "import/no-unresolved": "off", + "import/extensions": "off", + "import/prefer-default-export": "off", + "no-unused-vars": "off", + "@typescript-eslint/no-unused-vars": "off", + "no-plusplus": "off", + "no-restricted-syntax": "off", + "no-await-in-loop": "off", + "arrow-body-style": "off", + "no-use-before-define": "off", + "no-promise-executor-return": "off", + "no-console": "off" + } +} diff --git a/sdks/guides/typescript/ai-agents/mock-agent.ts b/sdks/guides/typescript/ai-agents/mock-agent.ts new file mode 100644 index 0000000000..3bbc2b559d --- /dev/null +++ b/sdks/guides/typescript/ai-agents/mock-agent.ts @@ -0,0 +1,29 @@ +/** Mock LLM and tools - no external API dependencies */ + +let callCount = 0; + +export interface LLMResponse { + content: string; + toolCalls: Array<{ name: string; args: Record }>; + done: boolean; +} + +export function callLlm(messages: Array<{ role: string; content: string 
}>): LLMResponse { + callCount += 1; + if (callCount === 1) { + return { + content: '', + toolCalls: [{ name: 'get_weather', args: { location: 'SF' } }], + done: false, + }; + } + return { content: "It's 72°F and sunny in SF.", toolCalls: [], done: true }; +} + +export function runTool(name: string, args: Record): string { + if (name === 'get_weather') { + const loc = String(args?.location ?? 'unknown'); + return `Weather in ${loc}: 72°F, sunny`; + } + return `Unknown tool: ${name}`; +} diff --git a/sdks/guides/typescript/ai-agents/worker.ts b/sdks/guides/typescript/ai-agents/worker.ts new file mode 100644 index 0000000000..2df0196a02 --- /dev/null +++ b/sdks/guides/typescript/ai-agents/worker.ts @@ -0,0 +1,16 @@ +import { hatchet } from '../../hatchet-client'; +import { agentTask, streamingAgentTask } from './workflow'; + +async function main() { + // > Step 04 Run Worker + const worker = await hatchet.worker('agent-worker', { + workflows: [agentTask, streamingAgentTask], + slots: 5, + }); + await worker.start(); + // !! +} + +if (require.main === module) { + main(); +} diff --git a/sdks/guides/typescript/ai-agents/workflow.ts b/sdks/guides/typescript/ai-agents/workflow.ts new file mode 100644 index 0000000000..c10af94f4d --- /dev/null +++ b/sdks/guides/typescript/ai-agents/workflow.ts @@ -0,0 +1,53 @@ +import { ConcurrencyLimitStrategy } from '@hatchet/protoc/v1/workflows'; +import { hatchet } from '../../hatchet-client'; +import { callLlm, runTool } from './mock-agent'; + +// > Step 01 Define Agent Task +export const agentTask = hatchet.durableTask({ + name: 'reasoning-loop-agent', + executionTimeout: '30m', + concurrency: { + expression: "input.session_id != null ? string(input.session_id) : 'constant'", + maxRuns: 1, + limitStrategy: ConcurrencyLimitStrategy.CANCEL_IN_PROGRESS, + }, + fn: async (input) => { + const query = (input as { query?: string })?.query ?? 'Hello'; + return agentReasoningLoop(query); + }, +}); +// !! 
// > Step 02 Reasoning Loop
// Core agent loop: ask the LLM, execute any requested tools, feed the
// results back as 'tool' messages, and repeat until the LLM signals done.
async function agentReasoningLoop(query: string) {
  const messages: Array<{ role: string; content: string }> = [{ role: 'user', content: query }];
  // Cap at 10 LLM turns so a looping model cannot run unbounded.
  for (let i = 0; i < 10; i++) {
    const resp = callLlm(messages);
    if (resp.done) return { response: resp.content };
    // Execute every tool call from this turn; each result becomes context
    // for the next LLM turn.
    for (const tc of resp.toolCalls) {
      const result = runTool(tc.name, tc.args);
      messages.push({ role: 'tool', content: result });
    }
  }
  return { response: 'Max iterations reached' };
}
// !!
+} + +if (require.main === module) { + main(); +} diff --git a/sdks/guides/typescript/batch-processing/workflow.ts b/sdks/guides/typescript/batch-processing/workflow.ts new file mode 100644 index 0000000000..7e03e3a9db --- /dev/null +++ b/sdks/guides/typescript/batch-processing/workflow.ts @@ -0,0 +1,32 @@ +import { hatchet } from '../../hatchet-client'; + +type BatchInput = { items: string[] }; +type ItemInput = { item_id: string }; + +const childTask = hatchet.task({ + name: 'process-item', + fn: async (input) => ({ + status: 'done', + item_id: input.item_id, + }), +}); + +// > Step 01 Define Parent Task +const parentTask = hatchet.durableTask({ + name: 'spawn-children', + fn: async (input) => { + const results = await Promise.all( + input.items.map((itemId) => childTask.run({ item_id: itemId })) + ); + return { processed: results.length, results }; + }, +}); +// !! + +// > Step 03 Process Item +function processItem(input: ItemInput) { + return { status: 'done', item_id: input.item_id }; +} +// !! 
+ +export { parentTask, childTask }; diff --git a/sdks/guides/typescript/document-processing/mock-ocr.ts b/sdks/guides/typescript/document-processing/mock-ocr.ts new file mode 100644 index 0000000000..552a29bc18 --- /dev/null +++ b/sdks/guides/typescript/document-processing/mock-ocr.ts @@ -0,0 +1,5 @@ +/** Mock OCR - no external dependencies */ + +export function parseDocument(content: Uint8Array): string { + return `Parsed text from ${content.length} bytes`; +} diff --git a/sdks/guides/typescript/document-processing/worker.ts b/sdks/guides/typescript/document-processing/worker.ts new file mode 100644 index 0000000000..b1029d1a7d --- /dev/null +++ b/sdks/guides/typescript/document-processing/worker.ts @@ -0,0 +1,15 @@ +import { hatchet } from '../../hatchet-client'; +import { docWf } from './workflow'; + +async function main() { + // > Step 04 Run Worker + const worker = await hatchet.worker('document-worker', { + workflows: [docWf], + }); + await worker.start(); + // !! +} + +if (require.main === module) { + main(); +} diff --git a/sdks/guides/typescript/document-processing/workflow.ts b/sdks/guides/typescript/document-processing/workflow.ts new file mode 100644 index 0000000000..6c19b33fa2 --- /dev/null +++ b/sdks/guides/typescript/document-processing/workflow.ts @@ -0,0 +1,41 @@ +import { hatchet } from '../../hatchet-client'; +import { parseDocument } from './mock-ocr'; + +type DocInput = { doc_id: string; content: Uint8Array }; + +// > Step 01 Define DAG +const docWf = hatchet.workflow({ name: 'DocumentPipeline' }); + +const ingest = docWf.task({ + name: 'ingest', + fn: async (input) => ({ doc_id: input.doc_id, content: input.content }), +}); + +// !! + +// > Step 02 Parse Stage +const parse = docWf.task({ + name: 'parse', + parents: [ingest], + fn: async (input, ctx) => { + const ingested = await ctx.parentOutput(ingest); + const text = parseDocument(ingested.content); + return { doc_id: input.doc_id, text }; + }, +}); + +// !! 
+ +// > Step 03 Extract Stage +const extract = docWf.task({ + name: 'extract', + parents: [parse], + fn: async (input, ctx) => { + const parsed = await ctx.parentOutput(parse); + return { doc_id: parsed.doc_id, entities: ['entity1', 'entity2'] }; + }, +}); + +// !! + +export { docWf }; diff --git a/sdks/guides/typescript/evaluator-optimizer/mock-llm.ts b/sdks/guides/typescript/evaluator-optimizer/mock-llm.ts new file mode 100644 index 0000000000..23327e9a09 --- /dev/null +++ b/sdks/guides/typescript/evaluator-optimizer/mock-llm.ts @@ -0,0 +1,16 @@ +let generateCount = 0; + +export function mockGenerate(prompt: string): string { + generateCount++; + if (generateCount === 1) { + return 'Check out our product! Buy now!'; + } + return 'Discover how our tool saves teams 10 hours/week. Try it free.'; +} + +export function mockEvaluate(draft: string): { score: number; feedback: string } { + if (draft.length < 40) { + return { score: 0.4, feedback: 'Too short and pushy. Add a specific benefit and soften the CTA.' }; + } + return { score: 0.9, feedback: 'Clear value prop, appropriate tone.' }; +} diff --git a/sdks/guides/typescript/evaluator-optimizer/worker.ts b/sdks/guides/typescript/evaluator-optimizer/worker.ts new file mode 100644 index 0000000000..1963556dcb --- /dev/null +++ b/sdks/guides/typescript/evaluator-optimizer/worker.ts @@ -0,0 +1,16 @@ +import { hatchet } from '../../hatchet-client'; +import { generatorTask, evaluatorTask, optimizerTask } from './workflow'; + +async function main() { + // > Step 03 Run Worker + const worker = await hatchet.worker('evaluator-optimizer-worker', { + workflows: [generatorTask, evaluatorTask, optimizerTask], + slots: 5, + }); + await worker.start(); + // !! 
// > Step 01 Define Tasks
// Generator: produces a draft; when feedback from a prior evaluation is
// present it rewrites the previous draft instead of starting fresh.
const generatorTask = hatchet.task({
  name: 'generate-draft',
  fn: async (input: GeneratorInput) => {
    const prompt = input.feedback
      ? `Improve this draft.\n\nDraft: ${input.previousDraft}\nFeedback: ${input.feedback}`
      : `Write a social media post about "${input.topic}" for ${input.audience}. Under 100 words.`;
    return { draft: mockGenerate(prompt) };
  },
});

// Evaluator: scores a draft and returns feedback for the next iteration.
const evaluatorTask = hatchet.task({
  name: 'evaluate-draft',
  fn: async (input: EvaluatorInput) => {
    return mockEvaluate(input.draft);
  },
});
// !!
// > Step 02 Optimization Loop
// Durable generate -> evaluate loop: stop as soon as a draft clears the
// score threshold, otherwise feed the evaluator's feedback into the next
// generation, up to maxIterations attempts.
const optimizerTask = hatchet.durableTask({
  name: 'evaluator-optimizer',
  executionTimeout: '5m',
  fn: async (input: { topic: string; audience: string }) => {
    const maxIterations = 3;
    const threshold = 0.8;
    let draft = '';
    let feedback = '';

    for (let i = 0; i < maxIterations; i++) {
      // First pass sends undefined draft/feedback (fresh generation);
      // later passes request a rewrite of the previous draft.
      const generated = await generatorTask.run({
        topic: input.topic,
        audience: input.audience,
        previousDraft: draft || undefined,
        feedback: feedback || undefined,
      });
      draft = generated.draft;

      const evaluation = await evaluatorTask.run({
        draft,
        topic: input.topic,
        audience: input.audience,
      });

      if (evaluation.score >= threshold) {
        return { draft, iterations: i + 1, score: evaluation.score };
      }
      feedback = evaluation.feedback;
    }

    // score -1 signals the threshold was never reached within the budget.
    return { draft, iterations: maxIterations, score: -1 };
  },
});
// !!
+} + +if (require.main === module) { + main(); +} diff --git a/sdks/guides/typescript/event-driven/workflow.ts b/sdks/guides/typescript/event-driven/workflow.ts new file mode 100644 index 0000000000..5520617450 --- /dev/null +++ b/sdks/guides/typescript/event-driven/workflow.ts @@ -0,0 +1,25 @@ +import { hatchet } from '../../hatchet-client'; + +type EventInput = { message: string; source?: string }; + +// > Step 01 Define Event Task +const eventWf = hatchet.workflow({ + name: 'EventDrivenWorkflow', + onEvents: ['order:created', 'user:signup'], +}); + +eventWf.task({ + name: 'process-event', + fn: async (input) => ({ + processed: input.message, + source: input.source ?? 'api', + }), +}); +// !! + +// > Step 02 Register Event Trigger +// Push an event from your app to trigger the workflow. Use the same key as onEvents. +hatchet.event.push('order:created', { message: 'Order #1234', source: 'webhook' }); +// !! + +export { eventWf }; diff --git a/sdks/guides/typescript/human-in-the-loop/trigger.ts b/sdks/guides/typescript/human-in-the-loop/trigger.ts new file mode 100644 index 0000000000..cf451e3f2e --- /dev/null +++ b/sdks/guides/typescript/human-in-the-loop/trigger.ts @@ -0,0 +1,11 @@ +import { hatchet } from '../../hatchet-client'; + +// > Step 03 Push Approval Event +// Include the runId so the event matches the specific task waiting for it. +export async function pushApproval(runId: string, approved: boolean, reason = '') { + await hatchet.event.push('approval:decision', { runId, approved, reason }); +} + +// Approve: await pushApproval('run-id-from-ui', true); +// Reject: await pushApproval('run-id-from-ui', false, 'needs review'); +// !! 
diff --git a/sdks/guides/typescript/human-in-the-loop/worker.ts b/sdks/guides/typescript/human-in-the-loop/worker.ts new file mode 100644 index 0000000000..975d64dab5 --- /dev/null +++ b/sdks/guides/typescript/human-in-the-loop/worker.ts @@ -0,0 +1,15 @@ +import { hatchet } from '../../hatchet-client'; +import { approvalTask } from './workflow'; + +async function main() { + // > Step 04 Run Worker + const worker = await hatchet.worker('human-in-the-loop-worker', { + workflows: [approvalTask], + }); + await worker.start(); + // !! +} + +if (require.main === module) { + main(); +} diff --git a/sdks/guides/typescript/human-in-the-loop/workflow.ts b/sdks/guides/typescript/human-in-the-loop/workflow.ts new file mode 100644 index 0000000000..b38641ffef --- /dev/null +++ b/sdks/guides/typescript/human-in-the-loop/workflow.ts @@ -0,0 +1,29 @@ +import { DurableContext } from '@hatchet-dev/typescript-sdk'; +import { hatchet } from '../../hatchet-client'; + +const APPROVAL_EVENT_KEY = 'approval:decision'; + +// > Step 02 Wait For Event +function waitForApproval(ctx: DurableContext) { + const runId = ctx.workflowRunId(); + return ctx.waitFor({ + eventKey: APPROVAL_EVENT_KEY, + expression: `input.runId == '${runId}'`, + }); +} +// !! + +// > Step 01 Define Approval Task +export const approvalTask = hatchet.durableTask({ + name: 'approval-task', + executionTimeout: '30m', + fn: async (_, ctx) => { + const proposedAction = { action: 'send_email', to: 'user@example.com' }; + const approval = waitForApproval(ctx); + if (approval?.approved) { + return { status: 'approved', action: proposedAction }; + } + return { status: 'rejected', reason: approval?.reason ?? '' }; + }, +}); +// !! 
diff --git a/sdks/guides/typescript/integrations/embedding-cohere.ts b/sdks/guides/typescript/integrations/embedding-cohere.ts new file mode 100644 index 0000000000..2eb0b32ba6 --- /dev/null +++ b/sdks/guides/typescript/integrations/embedding-cohere.ts @@ -0,0 +1,17 @@ +// Third-party integration - requires: pnpm add cohere-ai +// See: /guides/rag-and-indexing + +import Cohere from 'cohere-ai'; + +const client = new Cohere(); + +// > Cohere embedding usage +export async function embed(text: string): Promise { + const r = await client.embed({ + texts: [text], + model: 'embed-english-v3.0', + inputType: 'search_document', + }); + return r.embeddings[0] ?? []; +} +// !! diff --git a/sdks/guides/typescript/integrations/embedding-openai.ts b/sdks/guides/typescript/integrations/embedding-openai.ts new file mode 100644 index 0000000000..5540b485d0 --- /dev/null +++ b/sdks/guides/typescript/integrations/embedding-openai.ts @@ -0,0 +1,16 @@ +// Third-party integration - requires: pnpm add openai +// See: /guides/rag-and-indexing + +import OpenAI from 'openai'; + +const client = new OpenAI(); + +// > OpenAI embedding usage +export async function embed(text: string): Promise { + const r = await client.embeddings.create({ + model: 'text-embedding-3-small', + input: text, + }); + return r.data[0]?.embedding ?? []; +} +// !! 
// > Anthropic usage
// Adapter mapping an Anthropic Messages API response onto the guide's
// common LLM shape: { content, toolCalls, done }.
export async function complete(messages: Array<{ role: string; content: string }>) {
  const resp = await client.messages.create({
    model: 'claude-3-5-haiku-20241022',
    max_tokens: 1024,
    // Anthropic only accepts 'user' | 'assistant' roles in this field.
    messages: messages.map((m) => ({ role: m.role as 'user' | 'assistant', content: m.content })),
  });
  // A tool_use content block means the model wants a tool executed.
  const toolUse = resp.content.find((b) => b.type === 'tool_use');
  if (toolUse && toolUse.type === 'tool_use') {
    return {
      content: '',
      toolCalls: [{ name: toolUse.name, args: toolUse.input }],
      done: false,
    };
  }
  // Otherwise concatenate all text blocks into the final answer.
  const text = resp.content
    .filter((b): b is { type: 'text'; text: string } => b.type === 'text')
    .map((b) => b.text)
    .join('');
  return { content: text, toolCalls: [], done: true };
}
// !!
diff --git a/sdks/guides/typescript/integrations/llm-groq.ts b/sdks/guides/typescript/integrations/llm-groq.ts new file mode 100644 index 0000000000..24f794437f --- /dev/null +++ b/sdks/guides/typescript/integrations/llm-groq.ts @@ -0,0 +1,40 @@ +// Third-party integration - requires: pnpm add groq-sdk +// See: /guides/ai-agents + +import Groq from 'groq-sdk'; + +const client = new Groq(); + +// > Groq usage +export async function complete(messages: Array<{ role: string; content: string }>) { + const r = await client.chat.completions.create({ + model: 'llama-3.3-70b-versatile', + messages: messages as Groq.ChatCompletionMessageParam[], + tool_choice: 'auto', + tools: [ + { + type: 'function', + function: { + name: 'get_weather', + description: 'Get weather for a location', + parameters: { + type: 'object', + properties: { location: { type: 'string' } }, + required: ['location'], + }, + }, + }, + ], + }); + const msg = r.choices[0]?.message; + const toolCalls = (msg?.tool_calls ?? []).map((tc) => ({ + name: tc.function?.name ?? '', + args: JSON.parse(tc.function?.arguments ?? '{}'), + })); + return { + content: msg?.content ?? '', + toolCalls, + done: toolCalls.length === 0, + }; +} +// !! 
diff --git a/sdks/guides/typescript/integrations/llm-openai.ts b/sdks/guides/typescript/integrations/llm-openai.ts new file mode 100644 index 0000000000..2bf689eb26 --- /dev/null +++ b/sdks/guides/typescript/integrations/llm-openai.ts @@ -0,0 +1,40 @@ +// Third-party integration - requires: pnpm add openai +// See: /guides/ai-agents + +import OpenAI from 'openai'; + +const client = new OpenAI(); + +// > OpenAI usage +export async function complete(messages: Array<{ role: string; content: string }>) { + const r = await client.chat.completions.create({ + model: 'gpt-4o-mini', + messages: messages as OpenAI.ChatCompletionMessageParam[], + tool_choice: 'auto', + tools: [ + { + type: 'function', + function: { + name: 'get_weather', + description: 'Get weather for a location', + parameters: { + type: 'object', + properties: { location: { type: 'string' } }, + required: ['location'], + }, + }, + }, + ], + }); + const msg = r.choices[0]?.message; + const toolCalls = (msg?.tool_calls ?? []).map((tc) => ({ + name: tc.function?.name ?? '', + args: JSON.parse(tc.function?.arguments ?? '{}'), + })); + return { + content: msg?.content ?? '', + toolCalls, + done: toolCalls.length === 0, + }; +} +// !! diff --git a/sdks/guides/typescript/integrations/llm-vercel-ai-sdk.ts b/sdks/guides/typescript/integrations/llm-vercel-ai-sdk.ts new file mode 100644 index 0000000000..858c04bc8e --- /dev/null +++ b/sdks/guides/typescript/integrations/llm-vercel-ai-sdk.ts @@ -0,0 +1,33 @@ +// Third-party integration - requires: pnpm add ai @ai-sdk/openai +// See: /guides/ai-agents +// Vercel AI SDK: unified interface for OpenAI, Anthropic, Google, etc. 
 + +import { generateText, tool } from 'ai'; +import { openai } from '@ai-sdk/openai'; +import { z } from 'zod'; + +// > Vercel AI SDK usage +export async function complete(messages: Array<{ role: string; content: string }>) { + const tools = { + get_weather: tool({ + description: 'Get weather for a location', + parameters: z.object({ location: z.string() }), + execute: async ({ location }) => `Weather in ${location}: 72°F, sunny`, + }), + }; + const { text, toolCalls } = await generateText({ + model: openai('gpt-4o-mini'), + messages: messages.map((m) => ({ + role: m.role as 'user' | 'assistant' | 'system', + content: m.content, + })), + maxSteps: 5, // SDK runs tool loop internally + tools, + }); + return { + content: text, + tool_calls: toolCalls.map((tc) => ({ name: tc.toolName, args: tc.args })), + done: true, // maxSteps handles full agent loop + }; +} +// !! diff --git a/sdks/guides/typescript/integrations/ocr-google-vision.ts b/sdks/guides/typescript/integrations/ocr-google-vision.ts new file mode 100644 index 0000000000..05231cd0f1 --- /dev/null +++ b/sdks/guides/typescript/integrations/ocr-google-vision.ts @@ -0,0 +1,13 @@ +// Third-party integration - requires: pnpm add @google-cloud/vision +// See: /guides/document-processing + +import { ImageAnnotatorClient } from '@google-cloud/vision'; + +const client = new ImageAnnotatorClient(); + +// > Google Vision usage +export async function parseDocument(content: Buffer): Promise<string> { + const [result] = await client.documentTextDetection({ image: { content } }); + return result.fullTextAnnotation?.text ?? ''; +} +// !! 
diff --git a/sdks/guides/typescript/integrations/ocr-tesseract.ts b/sdks/guides/typescript/integrations/ocr-tesseract.ts new file mode 100644 index 0000000000..5843f2f81b --- /dev/null +++ b/sdks/guides/typescript/integrations/ocr-tesseract.ts @@ -0,0 +1,11 @@ +// Third-party integration - requires: pnpm add tesseract.js +// See: /guides/document-processing + +import Tesseract from 'tesseract.js'; + +// > Tesseract usage +export async function parseDocument(content: Buffer): Promise<string> { + const { data } = await Tesseract.recognize(content); + return data.text; +} +// !! diff --git a/sdks/guides/typescript/integrations/scraper-browserbase.ts b/sdks/guides/typescript/integrations/scraper-browserbase.ts new file mode 100644 index 0000000000..3eda9fa1b5 --- /dev/null +++ b/sdks/guides/typescript/integrations/scraper-browserbase.ts @@ -0,0 +1,21 @@ +// Third-party integration - requires: pnpm add @browserbasehq/sdk playwright +// See: /guides/web-scraping + +import Browserbase from '@browserbasehq/sdk'; +import { chromium } from 'playwright'; + +const bb = new Browserbase({ apiKey: process.env.BROWSERBASE_API_KEY! }); + +// > Browserbase usage +export async function scrapeUrl(url: string) { + const session = await bb.sessions.create({ + projectId: process.env.BROWSERBASE_PROJECT_ID!, + }); + const browser = await chromium.connectOverCDP(session.connectUrl); + const page = browser.contexts()[0].pages()[0]; + await page.goto(url); + const content = await page.content(); + await browser.close(); + return { url, content }; +} +// !! 
diff --git a/sdks/guides/typescript/integrations/scraper-firecrawl.ts b/sdks/guides/typescript/integrations/scraper-firecrawl.ts new file mode 100644 index 0000000000..346bbded97 --- /dev/null +++ b/sdks/guides/typescript/integrations/scraper-firecrawl.ts @@ -0,0 +1,17 @@ +// Third-party integration - requires: pnpm add @mendable/firecrawl-js +// See: /guides/web-scraping + +import FirecrawlApp from '@mendable/firecrawl-js'; + +const firecrawl = new FirecrawlApp({ apiKey: process.env.FIRECRAWL_API_KEY! }); + +// > Firecrawl usage +export async function scrapeUrl(url: string) { + const result = await firecrawl.scrapeUrl(url, { formats: ['markdown'] }); + return { + url, + content: result.markdown, + metadata: result.metadata, + }; +} +// !! diff --git a/sdks/guides/typescript/integrations/scraper-openai.ts b/sdks/guides/typescript/integrations/scraper-openai.ts new file mode 100644 index 0000000000..9330637db5 --- /dev/null +++ b/sdks/guides/typescript/integrations/scraper-openai.ts @@ -0,0 +1,17 @@ +// Third-party integration - requires: pnpm add openai +// See: /guides/web-scraping + +import OpenAI from 'openai'; + +const openai = new OpenAI(); + +// > OpenAI web search usage +export async function searchAndExtract(query: string) { + const response = await openai.responses.create({ + model: 'gpt-4o-mini', + tools: [{ type: 'web_search' }], + input: query, + }); + return { query, content: response.output_text }; +} +// !! 
diff --git a/sdks/guides/typescript/integrations/scraper-playwright.ts b/sdks/guides/typescript/integrations/scraper-playwright.ts new file mode 100644 index 0000000000..5344504c57 --- /dev/null +++ b/sdks/guides/typescript/integrations/scraper-playwright.ts @@ -0,0 +1,15 @@ +// Third-party integration - requires: pnpm add playwright +// See: /guides/web-scraping + +import { chromium } from 'playwright'; + +// > Playwright usage +export async function scrapeUrl(url: string) { + const browser = await chromium.launch({ headless: true }); + const page = await browser.newPage(); + await page.goto(url); + const content = await page.content(); + await browser.close(); + return { url, content }; +} +// !! diff --git a/sdks/guides/typescript/llm-pipelines/mock-llm.ts b/sdks/guides/typescript/llm-pipelines/mock-llm.ts new file mode 100644 index 0000000000..43bd30b59d --- /dev/null +++ b/sdks/guides/typescript/llm-pipelines/mock-llm.ts @@ -0,0 +1,5 @@ +/** Mock LLM - no external API dependencies */ + +export function generate(prompt: string): { content: string; valid: boolean } { + return { content: `Generated for: ${prompt.slice(0, 50)}...`, valid: true }; +} diff --git a/sdks/guides/typescript/llm-pipelines/worker.ts b/sdks/guides/typescript/llm-pipelines/worker.ts new file mode 100644 index 0000000000..dd92009532 --- /dev/null +++ b/sdks/guides/typescript/llm-pipelines/worker.ts @@ -0,0 +1,15 @@ +import { hatchet } from '../../hatchet-client'; +import { llmWf } from './workflow'; + +async function main() { + // > Step 04 Run Worker + const worker = await hatchet.worker('llm-pipeline-worker', { + workflows: [llmWf], + }); + await worker.start(); + // !! 
+} + +if (require.main === module) { + main(); +} diff --git a/sdks/guides/typescript/llm-pipelines/workflow.ts b/sdks/guides/typescript/llm-pipelines/workflow.ts new file mode 100644 index 0000000000..1d72fd8262 --- /dev/null +++ b/sdks/guides/typescript/llm-pipelines/workflow.ts @@ -0,0 +1,36 @@ +import { hatchet } from '../../hatchet-client'; +import { generate } from './mock-llm'; + +type PipelineInput = { prompt: string }; + +// > Step 01 Define Pipeline +const llmWf = hatchet.workflow<PipelineInput>({ name: 'LLMPipeline' }); + +const promptTask = llmWf.task({ + name: 'prompt-task', + fn: async (input) => ({ prompt: input.prompt }), +}); + +// !! + +// > Step 02 Prompt Task +function buildPrompt(userInput: string, context = ''): string { + return `Process the following: ${userInput}${context ? `\nContext: ${context}` : ''}`; +} +// !! + +// > Step 03 Validate Task +const generateTask = llmWf.task({ + name: 'generate-task', + parents: [promptTask], + fn: async (input, ctx) => { + const prev = await ctx.parentOutput(promptTask); + const output = generate(prev.prompt); + if (!output.valid) throw new Error('Validation failed'); + return output; + }, +}); + +// !! 
+ +export { llmWf }; diff --git a/sdks/guides/typescript/multi-agent/mock-llm.ts b/sdks/guides/typescript/multi-agent/mock-llm.ts new file mode 100644 index 0000000000..f5648650f6 --- /dev/null +++ b/sdks/guides/typescript/multi-agent/mock-llm.ts @@ -0,0 +1,23 @@ +let orchestratorCallCount = 0; + +export interface ToolCallResponse { + done: boolean; + content: string; + toolCall?: { name: string; args: { task: string } }; +} + +export function mockOrchestratorLlm(messages: Array<{ role: string; content: string }>): ToolCallResponse { + orchestratorCallCount++; + switch (orchestratorCallCount) { + case 1: + return { done: false, content: '', toolCall: { name: 'research', args: { task: 'Find key facts about the topic' } } }; + case 2: + return { done: false, content: '', toolCall: { name: 'writing', args: { task: 'Write a summary from the research' } } }; + default: + return { done: true, content: 'Here is the final report combining research and writing.' }; + } +} + +export function mockSpecialistLlm(task: string, role: string): string { + return `[${role}] Completed: ${task}`; +} diff --git a/sdks/guides/typescript/multi-agent/worker.ts b/sdks/guides/typescript/multi-agent/worker.ts new file mode 100644 index 0000000000..9b9c8c5414 --- /dev/null +++ b/sdks/guides/typescript/multi-agent/worker.ts @@ -0,0 +1,16 @@ +import { hatchet } from '../../hatchet-client'; +import { researchTask, writingTask, codeTask, orchestrator } from './workflow'; + +async function main() { + // > Step 03 Run Worker + const worker = await hatchet.worker('multi-agent-worker', { + workflows: [researchTask, writingTask, codeTask, orchestrator], + slots: 10, + }); + await worker.start(); + // !! 
+} + +if (require.main === module) { + main(); +} diff --git a/sdks/guides/typescript/multi-agent/workflow.ts b/sdks/guides/typescript/multi-agent/workflow.ts new file mode 100644 index 0000000000..6066f351b2 --- /dev/null +++ b/sdks/guides/typescript/multi-agent/workflow.ts @@ -0,0 +1,71 @@ +import { hatchet } from '../../hatchet-client'; +import { mockOrchestratorLlm, mockSpecialistLlm } from './mock-llm'; + +type SpecialistInput = { task: string; context?: string }; + +// > Step 01 Specialist Agents +const researchTask = hatchet.durableTask({ + name: 'research-specialist', + executionTimeout: '3m', + fn: async (input: SpecialistInput) => { + return { result: mockSpecialistLlm(input.task, 'research') }; + }, +}); + +const writingTask = hatchet.durableTask({ + name: 'writing-specialist', + executionTimeout: '2m', + fn: async (input: SpecialistInput) => { + return { result: mockSpecialistLlm(input.task, 'writing') }; + }, +}); + +const codeTask = hatchet.durableTask({ + name: 'code-specialist', + executionTimeout: '2m', + fn: async (input: SpecialistInput) => { + return { result: mockSpecialistLlm(input.task, 'code') }; + }, +}); +// !! 
 + +// > Step 02 Orchestrator Loop +const specialists: Record<string, typeof researchTask> = { + research: researchTask, + writing: writingTask, + code: codeTask, +}; + +const orchestrator = hatchet.durableTask({ + name: 'multi-agent-orchestrator', + executionTimeout: '15m', + fn: async (input: { goal: string }) => { + const messages: Array<{ role: string; content: string }> = [ + { role: 'user', content: input.goal }, + ]; + + for (let i = 0; i < 10; i++) { + const response = mockOrchestratorLlm(messages); + + if (response.done) return { result: response.content }; + + const specialist = specialists[response.toolCall!.name]; + if (!specialist) throw new Error(`Unknown specialist: ${response.toolCall!.name}`); + + const { result } = await specialist.run({ + task: response.toolCall!.args.task, + context: messages.map((m) => m.content).join('\n'), + }); + + messages.push( + { role: 'assistant', content: `Called ${response.toolCall!.name}` }, + { role: 'tool', content: result } + ); + } + + return { result: 'Max iterations reached' }; + }, +}); +// !! + +export { researchTask, writingTask, codeTask, orchestrator }; diff --git a/sdks/guides/typescript/package.json b/sdks/guides/typescript/package.json new file mode 100644 index 0000000000..565ba3e75b --- /dev/null +++ b/sdks/guides/typescript/package.json @@ -0,0 +1,34 @@ +{ + "name": "hatchet-guides-typescript", + "version": "0.0.0", + "private": true, + "description": "Hatchet guide examples (TypeScript) - docs snippets with integration deps", + "scripts": { + "lint:check": "eslint \"**/*.ts\"", + "lint:fix": "eslint . 
--fix" + }, + "dependencies": { + "@hatchet-dev/typescript-sdk": "file:../../typescript", + "@anthropic-ai/sdk": "^0.32.1", + "@ai-sdk/openai": "^1.0.0", + "@browserbasehq/sdk": "^2.7.0", + "@google-cloud/vision": "^4.0.0", + "@mendable/firecrawl-js": "^4.15.0", + "ai": "^4.0.0", + "cohere-ai": "^7.0.0", + "groq-sdk": "^0.5.0", + "openai": "^4.0.0", + "playwright": "^1.49.0", + "tesseract.js": "^5.0.0", + "zod": "^3.24.0" + }, + "devDependencies": { + "@types/node": "^22.0.0", + "@typescript-eslint/eslint-plugin": "^6.21.0", + "@typescript-eslint/parser": "^6.21.0", + "eslint": "^8.56.0", + "eslint-config-airbnb-base": "^15.0.0", + "eslint-config-prettier": "^9.0.0", + "typescript": "^5.8.0" + } +} diff --git a/sdks/guides/typescript/parallelization/mock-llm.ts b/sdks/guides/typescript/parallelization/mock-llm.ts new file mode 100644 index 0000000000..8528634ce2 --- /dev/null +++ b/sdks/guides/typescript/parallelization/mock-llm.ts @@ -0,0 +1,15 @@ +export function mockGenerateContent(message: string): string { + return `Here is a helpful response to: ${message}`; +} + +export function mockSafetyCheck(message: string): { safe: boolean; reason: string } { + if (message.toLowerCase().includes('unsafe')) { + return { safe: false, reason: 'Content flagged as potentially unsafe.' }; + } + return { safe: true, reason: 'Content is appropriate.' }; +} + +export function mockEvaluate(content: string): { score: number; approved: boolean } { + const score = content.length > 20 ? 
0.85 : 0.3; + return { score, approved: score >= 0.7 }; +} diff --git a/sdks/guides/typescript/parallelization/worker.ts b/sdks/guides/typescript/parallelization/worker.ts new file mode 100644 index 0000000000..40bcb7fc82 --- /dev/null +++ b/sdks/guides/typescript/parallelization/worker.ts @@ -0,0 +1,16 @@ +import { hatchet } from '../../hatchet-client'; +import { contentTask, safetyTask, evaluateTask, sectioningTask, votingTask } from './workflow'; + +async function main() { + // > Step 04 Run Worker + const worker = await hatchet.worker('parallelization-worker', { + workflows: [contentTask, safetyTask, evaluateTask, sectioningTask, votingTask], + slots: 10, + }); + await worker.start(); + // !! +} + +if (require.main === module) { + main(); +} diff --git a/sdks/guides/typescript/parallelization/workflow.ts b/sdks/guides/typescript/parallelization/workflow.ts new file mode 100644 index 0000000000..00ef640042 --- /dev/null +++ b/sdks/guides/typescript/parallelization/workflow.ts @@ -0,0 +1,70 @@ +import { hatchet } from '../../hatchet-client'; +import { mockGenerateContent, mockSafetyCheck, mockEvaluate } from './mock-llm'; + +type MessageInput = { message: string }; + +// > Step 01 Parallel Tasks +const contentTask = hatchet.task({ + name: 'generate-content', + fn: async (input: MessageInput) => { + return { content: mockGenerateContent(input.message) }; + }, +}); + +const safetyTask = hatchet.task({ + name: 'safety-check', + fn: async (input: MessageInput) => { + return mockSafetyCheck(input.message); + }, +}); + +const evaluateTask = hatchet.task({ + name: 'evaluate-content', + fn: async (input: { content: string }) => { + return mockEvaluate(input.content); + }, +}); +// !! 
+ +// > Step 02 Sectioning +const sectioningTask = hatchet.durableTask({ + name: 'parallel-sectioning', + executionTimeout: '2m', + fn: async (input: MessageInput) => { + const [content, safety] = await Promise.all([ + contentTask.run(input), + safetyTask.run(input), + ]); + + if (!safety.safe) { + return { blocked: true, reason: safety.reason }; + } + return { blocked: false, content: content.content }; + }, +}); +// !! + +// > Step 03 Voting +const votingTask = hatchet.durableTask({ + name: 'parallel-voting', + executionTimeout: '3m', + fn: async (input: { content: string }) => { + const votes = await Promise.all([ + evaluateTask.run(input), + evaluateTask.run(input), + evaluateTask.run(input), + ]); + + const approvals = votes.filter((v) => v.approved).length; + const avgScore = votes.reduce((sum, v) => sum + v.score, 0) / votes.length; + + return { + approved: approvals >= 2, + averageScore: avgScore, + votes: votes.length, + }; + }, +}); +// !! + +export { contentTask, safetyTask, evaluateTask, sectioningTask, votingTask }; diff --git a/sdks/guides/typescript/pnpm-lock.yaml b/sdks/guides/typescript/pnpm-lock.yaml new file mode 100644 index 0000000000..f34881fe8d --- /dev/null +++ b/sdks/guides/typescript/pnpm-lock.yaml @@ -0,0 +1,4674 @@ +lockfileVersion: '9.0' + +settings: + autoInstallPeers: true + excludeLinksFromLockfile: false + +importers: + + .: + dependencies: + '@ai-sdk/openai': + specifier: ^1.0.0 + version: 1.3.24(zod@3.25.76) + '@anthropic-ai/sdk': + specifier: ^0.32.1 + version: 0.32.1 + '@browserbasehq/sdk': + specifier: ^2.7.0 + version: 2.7.0 + '@google-cloud/vision': + specifier: ^4.0.0 + version: 4.3.3 + '@hatchet-dev/typescript-sdk': + specifier: file:../../typescript + version: file:../../typescript + '@mendable/firecrawl-js': + specifier: ^4.15.0 + version: 4.15.0 + ai: + specifier: ^4.0.0 + version: 4.3.19(react@19.2.4)(zod@3.25.76) + cohere-ai: + specifier: ^7.0.0 + version: 7.20.0 + groq-sdk: + specifier: ^0.5.0 + version: 0.5.0 + 
openai: + specifier: ^4.0.0 + version: 4.104.0(zod@3.25.76) + playwright: + specifier: ^1.49.0 + version: 1.58.2 + tesseract.js: + specifier: ^5.0.0 + version: 5.1.1 + zod: + specifier: ^3.24.0 + version: 3.25.76 + devDependencies: + '@types/node': + specifier: ^22.0.0 + version: 22.19.13 + '@typescript-eslint/eslint-plugin': + specifier: ^6.21.0 + version: 6.21.0(@typescript-eslint/parser@6.21.0(eslint@8.57.1)(typescript@5.9.3))(eslint@8.57.1)(typescript@5.9.3) + '@typescript-eslint/parser': + specifier: ^6.21.0 + version: 6.21.0(eslint@8.57.1)(typescript@5.9.3) + eslint: + specifier: ^8.56.0 + version: 8.57.1 + eslint-config-airbnb-base: + specifier: ^15.0.0 + version: 15.0.0(eslint-plugin-import@2.32.0(@typescript-eslint/parser@6.21.0(eslint@8.57.1)(typescript@5.9.3))(eslint@8.57.1))(eslint@8.57.1) + eslint-config-prettier: + specifier: ^9.0.0 + version: 9.1.2(eslint@8.57.1) + typescript: + specifier: ^5.8.0 + version: 5.9.3 + +packages: + + '@ai-sdk/openai@1.3.24': + resolution: {integrity: sha512-GYXnGJTHRTZc4gJMSmFRgEQudjqd4PUN0ZjQhPwOAYH1yOAvQoG/Ikqs+HyISRbLPCrhbZnPKCNHuRU4OfpW0Q==} + engines: {node: '>=18'} + peerDependencies: + zod: ^3.0.0 + + '@ai-sdk/provider-utils@2.2.8': + resolution: {integrity: sha512-fqhG+4sCVv8x7nFzYnFo19ryhAa3w096Kmc3hWxMQfW/TubPOmt3A6tYZhl4mUfQWWQMsuSkLrtjlWuXBVSGQA==} + engines: {node: '>=18'} + peerDependencies: + zod: ^3.23.8 + + '@ai-sdk/provider@1.1.3': + resolution: {integrity: sha512-qZMxYJ0qqX/RfnuIaab+zp8UAeJn/ygXXAffR5I4N0n1IrvA6qBsjc8hXLmBiMV2zoXlifkacF7sEFnYnjBcqg==} + engines: {node: '>=18'} + + '@ai-sdk/react@1.2.12': + resolution: {integrity: sha512-jK1IZZ22evPZoQW3vlkZ7wvjYGYF+tRBKXtrcolduIkQ/m/sOAVcVeVDUDvh1T91xCnWCdUGCPZg2avZ90mv3g==} + engines: {node: '>=18'} + peerDependencies: + react: ^18 || ^19 || ^19.0.0-rc + zod: ^3.23.8 + peerDependenciesMeta: + zod: + optional: true + + '@ai-sdk/ui-utils@1.2.11': + resolution: {integrity: 
sha512-3zcwCc8ezzFlwp3ZD15wAPjf2Au4s3vAbKsXQVyhxODHcmu0iyPO2Eua6D/vicq/AUm/BAo60r97O6HU+EI0+w==} + engines: {node: '>=18'} + peerDependencies: + zod: ^3.23.8 + + '@anthropic-ai/sdk@0.32.1': + resolution: {integrity: sha512-U9JwTrDvdQ9iWuABVsMLj8nJVwAyQz6QXvgLsVhryhCEPkLsbcP/MXxm+jYcAwLoV8ESbaTTjnD4kuAFa+Hyjg==} + + '@aws-crypto/sha256-browser@5.2.0': + resolution: {integrity: sha512-AXfN/lGotSQwu6HNcEsIASo7kWXZ5HYWvfOmSNKDsEqC4OashTp8alTmaz+F7TC2L083SFv5RdB+qU3Vs1kZqw==} + + '@aws-crypto/sha256-js@5.2.0': + resolution: {integrity: sha512-FFQQyu7edu4ufvIZ+OadFpHHOt+eSTBaYaki44c+akjg7qZg9oOQeLlk77F6tSYqjDAFClrHJk9tMf0HdVyOvA==} + engines: {node: '>=16.0.0'} + + '@aws-crypto/supports-web-crypto@5.2.0': + resolution: {integrity: sha512-iAvUotm021kM33eCdNfwIN//F77/IADDSs58i+MDaOqFrVjZo9bAal0NK7HurRuWLLpF1iLX7gbWrjHjeo+YFg==} + + '@aws-crypto/util@5.2.0': + resolution: {integrity: sha512-4RkU9EsI6ZpBve5fseQlGNUWKMa1RLPQ1dnjnQoe07ldfIzcsGb5hC5W0Dm7u423KWzawlrpbjXBrXCEv9zazQ==} + + '@aws-sdk/client-cognito-identity@3.1000.0': + resolution: {integrity: sha512-7PtY49oxAo0rzkXZ1ulumtRL4QYi30Q5AMJtqJhYCHc1VZr0I2f0LHxiwovzquqUPzmTArgY6LjcPB7bkB/54w==} + engines: {node: '>=20.0.0'} + + '@aws-sdk/client-sagemaker@3.1000.0': + resolution: {integrity: sha512-990KuZpeI96pjeXdZN4vRuTOaFCF40+HoNhrqMxJdkm4FKvXfFT0V7yjHUAeUz6yubviqV1yhKsuWnK5HD6X2g==} + engines: {node: '>=20.0.0'} + + '@aws-sdk/core@3.973.15': + resolution: {integrity: sha512-AlC0oQ1/mdJ8vCIqu524j5RB7M8i8E24bbkZmya1CuiQxkY7SdIZAyw7NDNMGaNINQFq/8oGRMX0HeOfCVsl/A==} + engines: {node: '>=20.0.0'} + + '@aws-sdk/credential-provider-cognito-identity@3.972.6': + resolution: {integrity: sha512-RJqEZYFoXkBTVCwSJuYFd311qc/Q/cBJ8BH08+ggX/rUTWw47TUEyZlxzyTlKfP7DoXG4Khu/TX+pzU6godEGQ==} + engines: {node: '>=20.0.0'} + + '@aws-sdk/credential-provider-env@3.972.13': + resolution: {integrity: sha512-6ljXKIQ22WFKyIs1jbORIkGanySBHaPPTOI4OxACP5WXgbcR0nDYfqNJfXEGwCK7IzHdNbCSFsNKKs0qCexR8Q==} + engines: {node: '>=20.0.0'} + + 
'@aws-sdk/credential-provider-http@3.972.15': + resolution: {integrity: sha512-dJuSTreu/T8f24SHDNTjd7eQ4rabr0TzPh2UTCwYexQtzG3nTDKm1e5eIdhiroTMDkPEJeY+WPkA6F9wod/20A==} + engines: {node: '>=20.0.0'} + + '@aws-sdk/credential-provider-ini@3.972.13': + resolution: {integrity: sha512-JKSoGb7XeabZLBJptpqoZIFbROUIS65NuQnEHGOpuT9GuuZwag2qciKANiDLFiYk4u8nSrJC9JIOnWKVvPVjeA==} + engines: {node: '>=20.0.0'} + + '@aws-sdk/credential-provider-login@3.972.13': + resolution: {integrity: sha512-RtYcrxdnJHKY8MFQGLltCURcjuMjnaQpAxPE6+/QEdDHHItMKZgabRe/KScX737F9vJMQsmJy9EmMOkCnoC1JQ==} + engines: {node: '>=20.0.0'} + + '@aws-sdk/credential-provider-node@3.972.14': + resolution: {integrity: sha512-WqoC2aliIjQM/L3oFf6j+op/enT2i9Cc4UTxxMEKrJNECkq4/PlKE5BOjSYFcq6G9mz65EFbXJh7zOU4CvjSKQ==} + engines: {node: '>=20.0.0'} + + '@aws-sdk/credential-provider-process@3.972.13': + resolution: {integrity: sha512-rsRG0LQA4VR+jnDyuqtXi2CePYSmfm5GNL9KxiW8DSe25YwJSr06W8TdUfONAC+rjsTI+aIH2rBGG5FjMeANrw==} + engines: {node: '>=20.0.0'} + + '@aws-sdk/credential-provider-sso@3.972.13': + resolution: {integrity: sha512-fr0UU1wx8kNHDhTQBXioc/YviSW8iXuAxHvnH7eQUtn8F8o/FU3uu6EUMvAQgyvn7Ne5QFnC0Cj0BFlwCk+RFw==} + engines: {node: '>=20.0.0'} + + '@aws-sdk/credential-provider-web-identity@3.972.13': + resolution: {integrity: sha512-a6iFMh1pgUH0TdcouBppLJUfPM7Yd3R9S1xFodPtCRoLqCz2RQFA3qjA8x4112PVYXEd4/pHX2eihapq39w0rA==} + engines: {node: '>=20.0.0'} + + '@aws-sdk/credential-providers@3.1000.0': + resolution: {integrity: sha512-J0pBgTZ2b3UCnj+NQTPtWYjrEUne2aGwq1Xuuw8P2cIMpPBYJc39e59oYoRGpNseUXqcjkh0nLtWqZREEeMvkg==} + engines: {node: '>=20.0.0'} + + '@aws-sdk/middleware-host-header@3.972.6': + resolution: {integrity: sha512-5XHwjPH1lHB+1q4bfC7T8Z5zZrZXfaLcjSMwTd1HPSPrCmPFMbg3UQ5vgNWcVj0xoX4HWqTGkSf2byrjlnRg5w==} + engines: {node: '>=20.0.0'} + + '@aws-sdk/middleware-logger@3.972.6': + resolution: {integrity: sha512-iFnaMFMQdljAPrvsCVKYltPt2j40LQqukAbXvW7v0aL5I+1GO7bZ/W8m12WxW3gwyK5p5u1WlHg8TSAizC5cZw==} + 
engines: {node: '>=20.0.0'} + + '@aws-sdk/middleware-recursion-detection@3.972.6': + resolution: {integrity: sha512-dY4v3of5EEMvik6+UDwQ96KfUFDk8m1oZDdkSc5lwi4o7rFrjnv0A+yTV+gu230iybQZnKgDLg/rt2P3H+Vscw==} + engines: {node: '>=20.0.0'} + + '@aws-sdk/middleware-user-agent@3.972.15': + resolution: {integrity: sha512-ABlFVcIMmuRAwBT+8q5abAxOr7WmaINirDJBnqGY5b5jSDo00UMlg/G4a0xoAgwm6oAECeJcwkvDlxDwKf58fQ==} + engines: {node: '>=20.0.0'} + + '@aws-sdk/nested-clients@3.996.3': + resolution: {integrity: sha512-AU5TY1V29xqwg/MxmA2odwysTez+ccFAhmfRJk+QZT5HNv90UTA9qKd1J9THlsQkvmH7HWTEV1lDNxkQO5PzNw==} + engines: {node: '>=20.0.0'} + + '@aws-sdk/region-config-resolver@3.972.6': + resolution: {integrity: sha512-Aa5PusHLXAqLTX1UKDvI3pHQJtIsF7Q+3turCHqfz/1F61/zDMWfbTC8evjhrrYVAtz9Vsv3SJ/waSUeu7B6gw==} + engines: {node: '>=20.0.0'} + + '@aws-sdk/token-providers@3.999.0': + resolution: {integrity: sha512-cx0hHUlgXULfykx4rdu/ciNAJaa3AL5xz3rieCz7NKJ68MJwlj3664Y8WR5MGgxfyYJBdamnkjNSx5Kekuc0cg==} + engines: {node: '>=20.0.0'} + + '@aws-sdk/types@3.973.4': + resolution: {integrity: sha512-RW60aH26Bsc016Y9B98hC0Plx6fK5P2v/iQYwMzrSjiDh1qRMUCP6KrXHYEHe3uFvKiOC93Z9zk4BJsUi6Tj1Q==} + engines: {node: '>=20.0.0'} + + '@aws-sdk/util-endpoints@3.996.3': + resolution: {integrity: sha512-yWIQSNiCjykLL+ezN5A+DfBb1gfXTytBxm57e64lYmwxDHNmInYHRJYYRAGWG1o77vKEiWaw4ui28e3yb1k5aQ==} + engines: {node: '>=20.0.0'} + + '@aws-sdk/util-locate-window@3.965.4': + resolution: {integrity: sha512-H1onv5SkgPBK2P6JR2MjGgbOnttoNzSPIRoeZTNPZYyaplwGg50zS3amXvXqF0/qfXpWEC9rLWU564QTB9bSog==} + engines: {node: '>=20.0.0'} + + '@aws-sdk/util-user-agent-browser@3.972.6': + resolution: {integrity: sha512-Fwr/llD6GOrFgQnKaI2glhohdGuBDfHfora6iG9qsBBBR8xv1SdCSwbtf5CWlUdCw5X7g76G/9Hf0Inh0EmoxA==} + + '@aws-sdk/util-user-agent-node@3.973.0': + resolution: {integrity: sha512-A9J2G4Nf236e9GpaC1JnA8wRn6u6GjnOXiTwBLA6NUJhlBTIGfrTy+K1IazmF8y+4OFdW3O5TZlhyspJMqiqjA==} + engines: {node: '>=20.0.0'} + peerDependencies: + aws-crt: 
'>=1.0.0' + peerDependenciesMeta: + aws-crt: + optional: true + + '@aws-sdk/xml-builder@3.972.8': + resolution: {integrity: sha512-Ql8elcUdYCha83Ol7NznBsgN5GVZnv3vUd86fEc6waU6oUdY0T1O9NODkEEOS/Uaogr87avDrUC6DSeM4oXjZg==} + engines: {node: '>=20.0.0'} + + '@aws/lambda-invoke-store@0.2.3': + resolution: {integrity: sha512-oLvsaPMTBejkkmHhjf09xTgk71mOqyr/409NKhRIL08If7AhVfUsJhVsx386uJaqNd42v9kWamQ9lFbkoC2dYw==} + engines: {node: '>=18.0.0'} + + '@browserbasehq/sdk@2.7.0': + resolution: {integrity: sha512-1iwuj3fChplMq+S66M9tGb9ZXA4e7Vi8MjqQQ6/T6rzoAWLGfDnEAPbgTOU479o+Mi3of5/6YXk1oIHKTw0NBw==} + + '@bufbuild/protobuf@2.11.0': + resolution: {integrity: sha512-sBXGT13cpmPR5BMgHE6UEEfEaShh5Ror6rfN3yEK5si7QVrtZg8LEPQb0VVhiLRUslD2yLnXtnRzG035J/mZXQ==} + + '@eslint-community/eslint-utils@4.9.1': + resolution: {integrity: sha512-phrYmNiYppR7znFEdqgfWHXR6NCkZEK7hwWDHZUjit/2/U0r6XvkDl0SYnoM51Hq7FhCGdLDT6zxCCOY1hexsQ==} + engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} + peerDependencies: + eslint: ^6.0.0 || ^7.0.0 || >=8.0.0 + + '@eslint-community/regexpp@4.12.2': + resolution: {integrity: sha512-EriSTlt5OC9/7SXkRSCAhfSxxoSUgBm33OH+IkwbdpgoqsSsUg7y3uh+IICI/Qg4BBWr3U2i39RpmycbxMq4ew==} + engines: {node: ^12.0.0 || ^14.0.0 || >=16.0.0} + + '@eslint/eslintrc@2.1.4': + resolution: {integrity: sha512-269Z39MS6wVJtsoUl10L60WdkhJVdPG24Q4eZTH3nnF6lpvSShEK3wQjDX9JRWAUPvPh7COouPpU9IrqaZFvtQ==} + engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} + + '@eslint/js@8.57.1': + resolution: {integrity: sha512-d9zaMRSTIKDLhctzH12MtXvJKSSUhaHcjV+2Z+GK+EEY7XKpP5yR4x+N3TAcHTcu963nIr+TMcCb4DBCYX1z6Q==} + engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} + + '@google-cloud/promisify@4.1.0': + resolution: {integrity: sha512-G/FQx5cE/+DqBbOpA5jKsegGwdPniU6PuIEMt+qxWgFxvxuFOzVmp6zYchtYuwAWV5/8Dgs0yAmjvNZv3uXLQg==} + engines: {node: '>=18'} + + '@google-cloud/vision@4.3.3': + resolution: {integrity: sha512-ZEGXlZ22ZXlCxorUOVTCOmwufQ7p48qz9NME3hwnxqFzSdfMXsGtxzXhlYanbuz505n/cjbTHWp/ORCzHfv4rg==} + 
engines: {node: '>=14.0.0'} + + '@grpc/grpc-js@1.14.3': + resolution: {integrity: sha512-Iq8QQQ/7X3Sac15oB6p0FmUg/klxQvXLeileoqrTRGJYLV+/9tubbr9ipz0GKHjmXVsgFPo/+W+2cA8eNcR+XA==} + engines: {node: '>=12.10.0'} + + '@grpc/proto-loader@0.7.15': + resolution: {integrity: sha512-tMXdRCfYVixjuFK+Hk0Q1s38gV9zDiDJfWL3h1rv4Qc39oILCu1TRTDt7+fGUI8K4G1Fj125Hx/ru3azECWTyQ==} + engines: {node: '>=6'} + hasBin: true + + '@grpc/proto-loader@0.8.0': + resolution: {integrity: sha512-rc1hOQtjIWGxcxpb9aHAfLpIctjEnsDehj0DAiVfBlmT84uvR0uUtN2hEi/ecvWVjXUGf5qPF4qEgiLOx1YIMQ==} + engines: {node: '>=6'} + hasBin: true + + '@hatchet-dev/typescript-sdk@file:../../typescript': + resolution: {directory: ../../typescript, type: directory} + + '@humanwhocodes/config-array@0.13.0': + resolution: {integrity: sha512-DZLEEqFWQFiyK6h5YIeynKx7JlvCYWL0cImfSRXZ9l4Sg2efkFGTuFf6vzXjK1cq6IYkU+Eg/JizXw+TD2vRNw==} + engines: {node: '>=10.10.0'} + deprecated: Use @eslint/config-array instead + + '@humanwhocodes/module-importer@1.0.1': + resolution: {integrity: sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==} + engines: {node: '>=12.22'} + + '@humanwhocodes/object-schema@2.0.3': + resolution: {integrity: sha512-93zYdMES/c1D69yZiKDBj0V24vqNzB/koF26KPaagAfd3P/4gUlh3Dys5ogAK+Exi9QyzlD8x/08Zt7wIKcDcA==} + deprecated: Use @eslint/object-schema instead + + '@js-sdsl/ordered-map@4.4.2': + resolution: {integrity: sha512-iUKgm52T8HOE/makSxjqoWhe95ZJA1/G1sYsGev2JDKUSS14KAgg1LHb+Ba+IPow0xflbnSkOsZcO08C7w1gYw==} + + '@mendable/firecrawl-js@4.15.0': + resolution: {integrity: sha512-lyszXaz1QFIPTlii/4caCugE4W+vbYpvJdIQ+8564ZIdjUluk4uwq5zuz8NBCpph78OPhErC0SHh+bBzeSGumQ==} + engines: {node: '>=22.0.0'} + + '@nodelib/fs.scandir@2.1.5': + resolution: {integrity: sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==} + engines: {node: '>= 8'} + + '@nodelib/fs.stat@2.0.5': + resolution: {integrity: 
sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==} + engines: {node: '>= 8'} + + '@nodelib/fs.walk@1.2.8': + resolution: {integrity: sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==} + engines: {node: '>= 8'} + + '@opentelemetry/api@1.9.0': + resolution: {integrity: sha512-3giAOQvZiH5F9bMlMiv8+GSPMeqg0dbaeo58/0SlA9sxSqZhnUtxzX9/2FzyhS9sWQf5S0GJE0AKBrFqjpeYcg==} + engines: {node: '>=8.0.0'} + + '@protobufjs/aspromise@1.1.2': + resolution: {integrity: sha512-j+gKExEuLmKwvz3OgROXtrJ2UG2x8Ch2YZUxahh+s1F2HZ+wAceUNLkvy6zKCPVRkU++ZWQrdxsUeQXmcg4uoQ==} + + '@protobufjs/base64@1.1.2': + resolution: {integrity: sha512-AZkcAA5vnN/v4PDqKyMR5lx7hZttPDgClv83E//FMNhR2TMcLUhfRUBHCmSl0oi9zMgDDqRUJkSxO3wm85+XLg==} + + '@protobufjs/codegen@2.0.4': + resolution: {integrity: sha512-YyFaikqM5sH0ziFZCN3xDC7zeGaB/d0IUb9CATugHWbd1FRFwWwt4ld4OYMPWu5a3Xe01mGAULCdqhMlPl29Jg==} + + '@protobufjs/eventemitter@1.1.0': + resolution: {integrity: sha512-j9ednRT81vYJ9OfVuXG6ERSTdEL1xVsNgqpkxMsbIabzSo3goCjDIveeGv5d03om39ML71RdmrGNjG5SReBP/Q==} + + '@protobufjs/fetch@1.1.0': + resolution: {integrity: sha512-lljVXpqXebpsijW71PZaCYeIcE5on1w5DlQy5WH6GLbFryLUrBD4932W/E2BSpfRJWseIL4v/KPgBFxDOIdKpQ==} + + '@protobufjs/float@1.0.2': + resolution: {integrity: sha512-Ddb+kVXlXst9d+R9PfTIxh1EdNkgoRe5tOX6t01f1lYWOvJnSPDBlG241QLzcyPdoNTsblLUdujGSE4RzrTZGQ==} + + '@protobufjs/inquire@1.1.0': + resolution: {integrity: sha512-kdSefcPdruJiFMVSbn801t4vFK7KB/5gd2fYvrxhuJYg8ILrmn9SKSX2tZdV6V+ksulWqS7aXjBcRXl3wHoD9Q==} + + '@protobufjs/path@1.1.2': + resolution: {integrity: sha512-6JOcJ5Tm08dOHAbdR3GrvP+yUUfkjG5ePsHYczMFLq3ZmMkAD98cDgcT2iA1lJ9NVwFd4tH/iSSoe44YWkltEA==} + + '@protobufjs/pool@1.1.0': + resolution: {integrity: sha512-0kELaGSIDBKvcgS4zkjz1PeddatrjYcmMWOlAuAPwAeccUrPHdUqo/J6LiymHHEiJT5NrF1UVwxY14f+fy4WQw==} + + '@protobufjs/utf8@1.1.0': + resolution: {integrity: 
sha512-Vvn3zZrhQZkkBE8LSuW3em98c0FwgO4nxzv6OdSxPKJIEKY2bGbHn+mhGIPerzI4twdxaP8/0+06HBpwf345Lw==} + + '@rtsao/scc@1.1.0': + resolution: {integrity: sha512-zt6OdqaDoOnJ1ZYsCYGt9YmWzDXl4vQdKTyJev62gFhRGKdx7mcT54V9KIjg+d2wi9EXsPvAPKe7i7WjfVWB8g==} + + '@smithy/abort-controller@4.2.10': + resolution: {integrity: sha512-qocxM/X4XGATqQtUkbE9SPUB6wekBi+FyJOMbPj0AhvyvFGYEmOlz6VB22iMePCQsFmMIvFSeViDvA7mZJG47g==} + engines: {node: '>=18.0.0'} + + '@smithy/config-resolver@4.4.9': + resolution: {integrity: sha512-ejQvXqlcU30h7liR9fXtj7PIAau1t/sFbJpgWPfiYDs7zd16jpH0IsSXKcba2jF6ChTXvIjACs27kNMc5xxE2Q==} + engines: {node: '>=18.0.0'} + + '@smithy/core@3.23.6': + resolution: {integrity: sha512-4xE+0L2NrsFKpEVFlFELkIHQddBvMbQ41LRIP74dGCXnY1zQ9DgksrBcRBDJT+iOzGy4VEJIeU3hkUK5mn06kg==} + engines: {node: '>=18.0.0'} + + '@smithy/credential-provider-imds@4.2.10': + resolution: {integrity: sha512-3bsMLJJLTZGZqVGGeBVFfLzuRulVsGTj12BzRKODTHqUABpIr0jMN1vN3+u6r2OfyhAQ2pXaMZWX/swBK5I6PQ==} + engines: {node: '>=18.0.0'} + + '@smithy/fetch-http-handler@5.3.11': + resolution: {integrity: sha512-wbTRjOxdFuyEg0CpumjZO0hkUl+fetJFqxNROepuLIoijQh51aMBmzFLfoQdwRjxsuuS2jizzIUTjPWgd8pd7g==} + engines: {node: '>=18.0.0'} + + '@smithy/hash-node@4.2.10': + resolution: {integrity: sha512-1VzIOI5CcsvMDvP3iv1vG/RfLJVVVc67dCRyLSB2Hn9SWCZrDO3zvcIzj3BfEtqRW5kcMg5KAeVf1K3dR6nD3w==} + engines: {node: '>=18.0.0'} + + '@smithy/invalid-dependency@4.2.10': + resolution: {integrity: sha512-vy9KPNSFUU0ajFYk0sDZIYiUlAWGEAhRfehIr5ZkdFrRFTAuXEPUd41USuqHU6vvLX4r6Q9X7MKBco5+Il0Org==} + engines: {node: '>=18.0.0'} + + '@smithy/is-array-buffer@2.2.0': + resolution: {integrity: sha512-GGP3O9QFD24uGeAXYUjwSTXARoqpZykHadOmA8G5vfJPK0/DC67qa//0qvqrJzL1xc8WQWX7/yc7fwudjPHPhA==} + engines: {node: '>=14.0.0'} + + '@smithy/is-array-buffer@4.2.1': + resolution: {integrity: sha512-Yfu664Qbf1B4IYIsYgKoABt010daZjkaCRvdU/sPnZG6TtHOB0md0RjNdLGzxe5UIdn9js4ftPICzmkRa9RJ4Q==} + engines: {node: '>=18.0.0'} + + 
'@smithy/middleware-content-length@4.2.10': + resolution: {integrity: sha512-TQZ9kX5c6XbjhaEBpvhSvMEZ0klBs1CFtOdPFwATZSbC9UeQfKHPLPN9Y+I6wZGMOavlYTOlHEPDrt42PMSH9w==} + engines: {node: '>=18.0.0'} + + '@smithy/middleware-endpoint@4.4.20': + resolution: {integrity: sha512-9W6Np4ceBP3XCYAGLoMCmn8t2RRVzuD1ndWPLBbv7H9CrwM9Bprf6Up6BM9ZA/3alodg0b7Kf6ftBK9R1N04vw==} + engines: {node: '>=18.0.0'} + + '@smithy/middleware-retry@4.4.37': + resolution: {integrity: sha512-/1psZZllBBSQ7+qo5+hhLz7AEPGLx3Z0+e3ramMBEuPK2PfvLK4SrncDB9VegX5mBn+oP/UTDrM6IHrFjvX1ZA==} + engines: {node: '>=18.0.0'} + + '@smithy/middleware-serde@4.2.11': + resolution: {integrity: sha512-STQdONGPwbbC7cusL60s7vOa6He6A9w2jWhoapL0mgVjmR19pr26slV+yoSP76SIssMTX/95e5nOZ6UQv6jolg==} + engines: {node: '>=18.0.0'} + + '@smithy/middleware-stack@4.2.10': + resolution: {integrity: sha512-pmts/WovNcE/tlyHa8z/groPeOtqtEpp61q3W0nW1nDJuMq/x+hWa/OVQBtgU0tBqupeXq0VBOLA4UZwE8I0YA==} + engines: {node: '>=18.0.0'} + + '@smithy/node-config-provider@4.3.10': + resolution: {integrity: sha512-UALRbJtVX34AdP2VECKVlnNgidLHA2A7YgcJzwSBg1hzmnO/bZBHl/LDQQyYifzUwp1UOODnl9JJ3KNawpUJ9w==} + engines: {node: '>=18.0.0'} + + '@smithy/node-http-handler@4.4.12': + resolution: {integrity: sha512-zo1+WKJkR9x7ZtMeMDAAsq2PufwiLDmkhcjpWPRRkmeIuOm6nq1qjFICSZbnjBvD09ei8KMo26BWxsu2BUU+5w==} + engines: {node: '>=18.0.0'} + + '@smithy/property-provider@4.2.10': + resolution: {integrity: sha512-5jm60P0CU7tom0eNrZ7YrkgBaoLFXzmqB0wVS+4uK8PPGmosSrLNf6rRd50UBvukztawZ7zyA8TxlrKpF5z9jw==} + engines: {node: '>=18.0.0'} + + '@smithy/protocol-http@5.3.10': + resolution: {integrity: sha512-2NzVWpYY0tRdfeCJLsgrR89KE3NTWT2wGulhNUxYlRmtRmPwLQwKzhrfVaiNlA9ZpJvbW7cjTVChYKgnkqXj1A==} + engines: {node: '>=18.0.0'} + + '@smithy/querystring-builder@4.2.10': + resolution: {integrity: sha512-HeN7kEvuzO2DmAzLukE9UryiUvejD3tMp9a1D1NJETerIfKobBUCLfviP6QEk500166eD2IATaXM59qgUI+YDA==} + engines: {node: '>=18.0.0'} + + '@smithy/querystring-parser@4.2.10': + resolution: 
{integrity: sha512-4Mh18J26+ao1oX5wXJfWlTT+Q1OpDR8ssiC9PDOuEgVBGloqg18Fw7h5Ct8DyT9NBYwJgtJ2nLjKKFU6RP1G1Q==} + engines: {node: '>=18.0.0'} + + '@smithy/service-error-classification@4.2.10': + resolution: {integrity: sha512-0R/+/Il5y8nB/By90o8hy/bWVYptbIfvoTYad0igYQO5RefhNCDmNzqxaMx7K1t/QWo0d6UynqpqN5cCQt1MCg==} + engines: {node: '>=18.0.0'} + + '@smithy/shared-ini-file-loader@4.4.5': + resolution: {integrity: sha512-pHgASxl50rrtOztgQCPmOXFjRW+mCd7ALr/3uXNzRrRoGV5G2+78GOsQ3HlQuBVHCh9o6xqMNvlIKZjWn4Euug==} + engines: {node: '>=18.0.0'} + + '@smithy/signature-v4@5.3.10': + resolution: {integrity: sha512-Wab3wW8468WqTKIxI+aZe3JYO52/RYT/8sDOdzkUhjnLakLe9qoQqIcfih/qxcF4qWEFoWBszY0mj5uxffaVXA==} + engines: {node: '>=18.0.0'} + + '@smithy/smithy-client@4.12.0': + resolution: {integrity: sha512-R8bQ9K3lCcXyZmBnQqUZJF4ChZmtWT5NLi6x5kgWx5D+/j0KorXcA0YcFg/X5TOgnTCy1tbKc6z2g2y4amFupQ==} + engines: {node: '>=18.0.0'} + + '@smithy/types@4.13.0': + resolution: {integrity: sha512-COuLsZILbbQsdrwKQpkkpyep7lCsByxwj7m0Mg5v66/ZTyenlfBc40/QFQ5chO0YN/PNEH1Bi3fGtfXPnYNeDw==} + engines: {node: '>=18.0.0'} + + '@smithy/url-parser@4.2.10': + resolution: {integrity: sha512-uypjF7fCDsRk26u3qHmFI/ePL7bxxB9vKkE+2WKEciHhz+4QtbzWiHRVNRJwU3cKhrYDYQE3b0MRFtqfLYdA4A==} + engines: {node: '>=18.0.0'} + + '@smithy/util-base64@4.3.1': + resolution: {integrity: sha512-BKGuawX4Doq/bI/uEmg+Zyc36rJKWuin3py89PquXBIBqmbnJwBBsmKhdHfNEp0+A4TDgLmT/3MSKZ1SxHcR6w==} + engines: {node: '>=18.0.0'} + + '@smithy/util-body-length-browser@4.2.1': + resolution: {integrity: sha512-SiJeLiozrAoCrgDBUgsVbmqHmMgg/2bA15AzcbcW+zan7SuyAVHN4xTSbq0GlebAIwlcaX32xacnrG488/J/6g==} + engines: {node: '>=18.0.0'} + + '@smithy/util-body-length-node@4.2.2': + resolution: {integrity: sha512-4rHqBvxtJEBvsZcFQSPQqXP2b/yy/YlB66KlcEgcH2WNoOKCKB03DSLzXmOsXjbl8dJ4OEYTn31knhdznwk7zw==} + engines: {node: '>=18.0.0'} + + '@smithy/util-buffer-from@2.2.0': + resolution: {integrity: 
sha512-IJdWBbTcMQ6DA0gdNhh/BwrLkDR+ADW5Kr1aZmd4k3DIF6ezMV4R2NIAmT08wQJ3yUK82thHWmC/TnK/wpMMIA==} + engines: {node: '>=14.0.0'} + + '@smithy/util-buffer-from@4.2.1': + resolution: {integrity: sha512-/swhmt1qTiVkaejlmMPPDgZhEaWb/HWMGRBheaxwuVkusp/z+ErJyQxO6kaXumOciZSWlmq6Z5mNylCd33X7Ig==} + engines: {node: '>=18.0.0'} + + '@smithy/util-config-provider@4.2.1': + resolution: {integrity: sha512-462id/00U8JWFw6qBuTSWfN5TxOHvDu4WliI97qOIOnuC/g+NDAknTU8eoGXEPlLkRVgWEr03jJBLV4o2FL8+A==} + engines: {node: '>=18.0.0'} + + '@smithy/util-defaults-mode-browser@4.3.36': + resolution: {integrity: sha512-R0smq7EHQXRVMxkAxtH5akJ/FvgAmNF6bUy/GwY/N20T4GrwjT633NFm0VuRpC+8Bbv8R9A0DoJ9OiZL/M3xew==} + engines: {node: '>=18.0.0'} + + '@smithy/util-defaults-mode-node@4.2.39': + resolution: {integrity: sha512-otWuoDm35btJV1L8MyHrPl462B07QCdMTktKc7/yM+Psv6KbED/ziXiHnmr7yPHUjfIwE9S8Max0LO24Mo3ZVg==} + engines: {node: '>=18.0.0'} + + '@smithy/util-endpoints@3.3.1': + resolution: {integrity: sha512-xyctc4klmjmieQiF9I1wssBWleRV0RhJ2DpO8+8yzi2LO1Z+4IWOZNGZGNj4+hq9kdo+nyfrRLmQTzc16Op2Vg==} + engines: {node: '>=18.0.0'} + + '@smithy/util-hex-encoding@4.2.1': + resolution: {integrity: sha512-c1hHtkgAWmE35/50gmdKajgGAKV3ePJ7t6UtEmpfCWJmQE9BQAQPz0URUVI89eSkcDqCtzqllxzG28IQoZPvwA==} + engines: {node: '>=18.0.0'} + + '@smithy/util-middleware@4.2.10': + resolution: {integrity: sha512-LxaQIWLp4y0r72eA8mwPNQ9va4h5KeLM0I3M/HV9klmFaY2kN766wf5vsTzmaOpNNb7GgXAd9a25P3h8T49PSA==} + engines: {node: '>=18.0.0'} + + '@smithy/util-retry@4.2.10': + resolution: {integrity: sha512-HrBzistfpyE5uqTwiyLsFHscgnwB0kgv8vySp7q5kZ0Eltn/tjosaSGGDj/jJ9ys7pWzIP/icE2d+7vMKXLv7A==} + engines: {node: '>=18.0.0'} + + '@smithy/util-stream@4.5.15': + resolution: {integrity: sha512-OlOKnaqnkU9X+6wEkd7mN+WB7orPbCVDauXOj22Q7VtiTkvy7ZdSsOg4QiNAZMgI4OkvNf+/VLUC3VXkxuWJZw==} + engines: {node: '>=18.0.0'} + + '@smithy/util-uri-escape@4.2.1': + resolution: {integrity: 
sha512-YmiUDn2eo2IOiWYYvGQkgX5ZkBSiTQu4FlDo5jNPpAxng2t6Sjb6WutnZV9l6VR4eJul1ABmCrnWBC9hKHQa6Q==} + engines: {node: '>=18.0.0'} + + '@smithy/util-utf8@2.3.0': + resolution: {integrity: sha512-R8Rdn8Hy72KKcebgLiv8jQcQkXoLMOGGv5uI1/k0l+snqkOzQ1R0ChUBCxWMlBsFMekWjq0wRudIweFs7sKT5A==} + engines: {node: '>=14.0.0'} + + '@smithy/util-utf8@4.2.1': + resolution: {integrity: sha512-DSIwNaWtmzrNQHv8g7DBGR9mulSit65KSj5ymGEIAknmIN8IpbZefEep10LaMG/P/xquwbmJ1h9ectz8z6mV6g==} + engines: {node: '>=18.0.0'} + + '@smithy/util-waiter@4.2.10': + resolution: {integrity: sha512-4eTWph/Lkg1wZEDAyObwme0kmhEb7J/JjibY2znJdrYRgKbKqB7YoEhhJVJ4R1g/SYih4zuwX7LpJaM8RsnTVg==} + engines: {node: '>=18.0.0'} + + '@smithy/uuid@1.1.1': + resolution: {integrity: sha512-dSfDCeihDmZlV2oyr0yWPTUfh07suS+R5OB+FZGiv/hHyK3hrFBW5rR1UYjfa57vBsrP9lciFkRPzebaV1Qujw==} + engines: {node: '>=18.0.0'} + + '@tootallnate/once@2.0.0': + resolution: {integrity: sha512-XCuKFP5PS55gnMVu3dty8KPatLqUoy/ZYzDzAGCQ8JNFCkLXzmI7vNHCR+XpbZaMWQK/vQubr7PkYq8g470J/A==} + engines: {node: '>= 10'} + + '@types/caseless@0.12.5': + resolution: {integrity: sha512-hWtVTC2q7hc7xZ/RLbxapMvDMgUnDvKvMOpKal4DrMyfGBUfB1oKaZlIRr6mJL+If3bAP6sV/QneGzF6tJjZDg==} + + '@types/diff-match-patch@1.0.36': + resolution: {integrity: sha512-xFdR6tkm0MWvBfO8xXCSsinYxHcqkQUlcHeSpMC2ukzOb6lwQAfDmW+Qt0AvlGd8HpsS28qKsB+oPeJn9I39jg==} + + '@types/json-schema@7.0.15': + resolution: {integrity: sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==} + + '@types/json5@0.0.29': + resolution: {integrity: sha512-dRLjCWHYg4oaA77cxO64oO+7JwCwnIzkZPdrrC71jQmQtlhM556pwKo5bUzqvZndkVbeFLIIi+9TC40JNF5hNQ==} + + '@types/long@4.0.2': + resolution: {integrity: sha512-MqTGEo5bj5t157U6fA/BiDynNkn0YknVdh48CMPkTSpFTVmvao5UQmm7uEF6xBEo7qIMAlY/JSleYaE6VOdpaA==} + + '@types/node-fetch@2.6.13': + resolution: {integrity: sha512-QGpRVpzSaUs30JBSGPjOg4Uveu384erbHBoT1zeONvyCfwQxIkUshLAOqN/k9EjGviPRmWTTe6aH2qySWKTVSw==} + + '@types/node@18.19.130': + 
resolution: {integrity: sha512-GRaXQx6jGfL8sKfaIDD6OupbIHBr9jv7Jnaml9tB7l4v068PAOXqfcujMMo5PhbIs6ggR1XODELqahT2R8v0fg==} + + '@types/node@22.19.13': + resolution: {integrity: sha512-akNQMv0wW5uyRpD2v2IEyRSZiR+BeGuoB6L310EgGObO44HSMNT8z1xzio28V8qOrgYaopIDNA18YgdXd+qTiw==} + + '@types/qs@6.14.0': + resolution: {integrity: sha512-eOunJqu0K1923aExK6y8p6fsihYEn/BYuQ4g0CxAAgFc4b/ZLN4CrsRZ55srTdqoiLzU2B2evC+apEIxprEzkQ==} + + '@types/request@2.48.13': + resolution: {integrity: sha512-FGJ6udDNUCjd19pp0Q3iTiDkwhYup7J8hpMW9c4k53NrccQFFWKRho6hvtPPEhnXWKvukfwAlB6DbDz4yhH5Gg==} + + '@types/semver@7.7.1': + resolution: {integrity: sha512-FmgJfu+MOcQ370SD0ev7EI8TlCAfKYU+B4m5T3yXc1CiRN94g/SZPtsCkk506aUDtlMnFZvasDwHHUcZUEaYuA==} + + '@types/tough-cookie@4.0.5': + resolution: {integrity: sha512-/Ad8+nIOV7Rl++6f1BdKxFSMgmoqEoYbHRpPcx3JEfv8VRsQe9Z4mCXeJBzxs7mbHY/XOZZuXlRNfhpVPbs6ZA==} + + '@typescript-eslint/eslint-plugin@6.21.0': + resolution: {integrity: sha512-oy9+hTPCUFpngkEZUSzbf9MxI65wbKFoQYsgPdILTfbUldp5ovUuphZVe4i30emU9M/kP+T64Di0mxl7dSw3MA==} + engines: {node: ^16.0.0 || >=18.0.0} + peerDependencies: + '@typescript-eslint/parser': ^6.0.0 || ^6.0.0-alpha + eslint: ^7.0.0 || ^8.0.0 + typescript: '*' + peerDependenciesMeta: + typescript: + optional: true + + '@typescript-eslint/parser@6.21.0': + resolution: {integrity: sha512-tbsV1jPne5CkFQCgPBcDOt30ItF7aJoZL997JSF7MhGQqOeT3svWRYxiqlfA5RUdlHN6Fi+EI9bxqbdyAUZjYQ==} + engines: {node: ^16.0.0 || >=18.0.0} + peerDependencies: + eslint: ^7.0.0 || ^8.0.0 + typescript: '*' + peerDependenciesMeta: + typescript: + optional: true + + '@typescript-eslint/scope-manager@6.21.0': + resolution: {integrity: sha512-OwLUIWZJry80O99zvqXVEioyniJMa+d2GrqpUTqi5/v5D5rOrppJVBPa0yKCblcigC0/aYAzxxqQ1B+DS2RYsg==} + engines: {node: ^16.0.0 || >=18.0.0} + + '@typescript-eslint/type-utils@6.21.0': + resolution: {integrity: sha512-rZQI7wHfao8qMX3Rd3xqeYSMCL3SoiSQLBATSiVKARdFGCYSRvmViieZjqc58jKgs8Y8i9YvVVhRbHSTA4VBag==} + engines: {node: ^16.0.0 || >=18.0.0} + 
peerDependencies: + eslint: ^7.0.0 || ^8.0.0 + typescript: '*' + peerDependenciesMeta: + typescript: + optional: true + + '@typescript-eslint/types@6.21.0': + resolution: {integrity: sha512-1kFmZ1rOm5epu9NZEZm1kckCDGj5UJEf7P1kliH4LKu/RkwpsfqqGmY2OOcUs18lSlQBKLDYBOGxRVtrMN5lpg==} + engines: {node: ^16.0.0 || >=18.0.0} + + '@typescript-eslint/typescript-estree@6.21.0': + resolution: {integrity: sha512-6npJTkZcO+y2/kr+z0hc4HwNfrrP4kNYh57ek7yCNlrBjWQ1Y0OS7jiZTkgumrvkX5HkEKXFZkkdFNkaW2wmUQ==} + engines: {node: ^16.0.0 || >=18.0.0} + peerDependencies: + typescript: '*' + peerDependenciesMeta: + typescript: + optional: true + + '@typescript-eslint/utils@6.21.0': + resolution: {integrity: sha512-NfWVaC8HP9T8cbKQxHcsJBY5YE1O33+jpMwN45qzWWaPDZgLIbo12toGMWnmhvCpd3sIxkpDw3Wv1B3dYrbDQQ==} + engines: {node: ^16.0.0 || >=18.0.0} + peerDependencies: + eslint: ^7.0.0 || ^8.0.0 + + '@typescript-eslint/visitor-keys@6.21.0': + resolution: {integrity: sha512-JJtkDduxLi9bivAB+cYOVMtbkqdPOhZ+ZI5LC47MIRrDV4Yn2o+ZnW10Nkmr28xRpSpdJ6Sm42Hjf2+REYXm0A==} + engines: {node: ^16.0.0 || >=18.0.0} + + '@ungap/structured-clone@1.3.0': + resolution: {integrity: sha512-WmoN8qaIAo7WTYWbAZuG8PYEhn5fkz7dZrqTBZ7dtt//lL2Gwms1IcnQ5yHqjDfX8Ft5j4YzDM23f87zBfDe9g==} + + abort-controller-x@0.4.3: + resolution: {integrity: sha512-VtUwTNU8fpMwvWGn4xE93ywbogTYsuT+AUxAXOeelbXuQVIwNmC5YLeho9sH4vZ4ITW8414TTAOG1nW6uIVHCA==} + + abort-controller@3.0.0: + resolution: {integrity: sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg==} + engines: {node: '>=6.5'} + + acorn-jsx@5.3.2: + resolution: {integrity: sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==} + peerDependencies: + acorn: ^6.0.0 || ^7.0.0 || ^8.0.0 + + acorn@8.16.0: + resolution: {integrity: sha512-UVJyE9MttOsBQIDKw1skb9nAwQuR5wuGD3+82K6JgJlm/Y+KI92oNsMNGZCYdDsVtRHSak0pcV5Dno5+4jh9sw==} + engines: {node: '>=0.4.0'} + hasBin: true + + agent-base@6.0.2: + resolution: 
{integrity: sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==} + engines: {node: '>= 6.0.0'} + + agent-base@7.1.4: + resolution: {integrity: sha512-MnA+YT8fwfJPgBx3m60MNqakm30XOkyIoH1y6huTQvC0PwZG7ki8NacLBcrPbNoo8vEZy7Jpuk7+jMO+CUovTQ==} + engines: {node: '>= 14'} + + agentkeepalive@4.6.0: + resolution: {integrity: sha512-kja8j7PjmncONqaTsB8fQ+wE2mSU2DJ9D4XKoJ5PFWIdRMa6SLSN1ff4mOr4jCbfRSsxR4keIiySJU0N9T5hIQ==} + engines: {node: '>= 8.0.0'} + + ai@4.3.19: + resolution: {integrity: sha512-dIE2bfNpqHN3r6IINp9znguYdhIOheKW2LDigAMrgt/upT3B8eBGPSCblENvaZGoq+hxaN9fSMzjWpbqloP+7Q==} + engines: {node: '>=18'} + peerDependencies: + react: ^18 || ^19 || ^19.0.0-rc + zod: ^3.23.8 + peerDependenciesMeta: + react: + optional: true + + ajv@6.14.0: + resolution: {integrity: sha512-IWrosm/yrn43eiKqkfkHis7QioDleaXQHdDVPKg0FSwwd/DuvyX79TZnFOnYpB7dcsFAMmtFztZuXPDvSePkFw==} + + ansi-regex@5.0.1: + resolution: {integrity: sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==} + engines: {node: '>=8'} + + ansi-styles@4.3.0: + resolution: {integrity: sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==} + engines: {node: '>=8'} + + argparse@2.0.1: + resolution: {integrity: sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==} + + array-buffer-byte-length@1.0.2: + resolution: {integrity: sha512-LHE+8BuR7RYGDKvnrmcuSq3tDcKv9OFEXQt/HpbZhY7V6h0zlUXutnAD82GiFx9rdieCMjkvtcsPqBwgUl1Iiw==} + engines: {node: '>= 0.4'} + + array-includes@3.1.9: + resolution: {integrity: sha512-FmeCCAenzH0KH381SPT5FZmiA/TmpndpcaShhfgEN9eCVjnFBqq3l1xrI42y8+PPLI6hypzou4GXw00WHmPBLQ==} + engines: {node: '>= 0.4'} + + array-union@2.1.0: + resolution: {integrity: sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==} + engines: {node: '>=8'} + + array.prototype.findlastindex@1.2.6: + resolution: {integrity: 
sha512-F/TKATkzseUExPlfvmwQKGITM3DGTK+vkAsCZoDc5daVygbJBnjEUCbgkAvVFsgfXfX4YIqZ/27G3k3tdXrTxQ==} + engines: {node: '>= 0.4'} + + array.prototype.flat@1.3.3: + resolution: {integrity: sha512-rwG/ja1neyLqCuGZ5YYrznA62D4mZXg0i1cIskIUKSiqF3Cje9/wXAls9B9s1Wa2fomMsIv8czB8jZcPmxCXFg==} + engines: {node: '>= 0.4'} + + array.prototype.flatmap@1.3.3: + resolution: {integrity: sha512-Y7Wt51eKJSyi80hFrJCePGGNo5ktJCslFuboqJsbf57CCPcm5zztluPlc4/aD8sWsKvlwatezpV4U1efk8kpjg==} + engines: {node: '>= 0.4'} + + arraybuffer.prototype.slice@1.0.4: + resolution: {integrity: sha512-BNoCY6SXXPQ7gF2opIP4GBE+Xw7U+pHMYKuzjgCN3GwiaIR09UUeKfheyIry77QtrCBlC0KK0q5/TER/tYh3PQ==} + engines: {node: '>= 0.4'} + + async-function@1.0.0: + resolution: {integrity: sha512-hsU18Ae8CDTR6Kgu9DYf0EbCr/a5iGL0rytQDobUcdpYOKokk8LEjVphnXkDkgpi0wYVsqrXuP0bZxJaTqdgoA==} + engines: {node: '>= 0.4'} + + asynckit@0.4.0: + resolution: {integrity: sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==} + + available-typed-arrays@1.0.7: + resolution: {integrity: sha512-wvUjBtSGN7+7SjNpq/9M2Tg350UZD3q62IFZLbRAR1bSMlCo1ZaeW+BJ+D090e4hIIZLBcTDWe4Mh4jvUDajzQ==} + engines: {node: '>= 0.4'} + + axios@1.13.6: + resolution: {integrity: sha512-ChTCHMouEe2kn713WHbQGcuYrr6fXTBiu460OTwWrWob16g1bXn4vtz07Ope7ewMozJAnEquLk5lWQWtBig9DQ==} + + balanced-match@1.0.2: + resolution: {integrity: sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==} + + base64-js@1.5.1: + resolution: {integrity: sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==} + + bignumber.js@9.3.1: + resolution: {integrity: sha512-Ko0uX15oIUS7wJ3Rb30Fs6SkVbLmPBAKdlm7q9+ak9bbIeFf0MwuBsQV6z7+X768/cHsfg+WlysDWJcmthjsjQ==} + + bintrees@1.0.2: + resolution: {integrity: sha512-VOMgTMwjAaUG580SXn3LacVgjurrbMme7ZZNYGSSV7mmtY6QQRh0Eg3pwIcntQ77DErK1L0NxkbetjcoXzVwKw==} + + bmp-js@0.1.0: + resolution: {integrity: 
sha512-vHdS19CnY3hwiNdkaqk93DvjVLfbEcI8mys4UjuWrlX1haDmroo8o4xCzh4wD6DGV6HxRCyauwhHRqMTfERtjw==} + + bowser@2.14.1: + resolution: {integrity: sha512-tzPjzCxygAKWFOJP011oxFHs57HzIhOEracIgAePE4pqB3LikALKnSzUyU4MGs9/iCEUuHlAJTjTc5M+u7YEGg==} + + brace-expansion@1.1.12: + resolution: {integrity: sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==} + + brace-expansion@2.0.2: + resolution: {integrity: sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==} + + braces@3.0.3: + resolution: {integrity: sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==} + engines: {node: '>=8'} + + buffer-equal-constant-time@1.0.1: + resolution: {integrity: sha512-zRpUiDwd/xk6ADqPMATG8vc9VPrkck7T07OIx0gnjmJAnHnTVXNQG3vfvWNuiZIkwu9KrKdA1iJKfsfTVxE6NA==} + + buffer@6.0.3: + resolution: {integrity: sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==} + + call-bind-apply-helpers@1.0.2: + resolution: {integrity: sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==} + engines: {node: '>= 0.4'} + + call-bind@1.0.8: + resolution: {integrity: sha512-oKlSFMcMwpUg2ednkhQ454wfWiU/ul3CkJe/PEHcTKuiX6RpbehUiFMXu13HalGZxfUwCQzZG747YXBn1im9ww==} + engines: {node: '>= 0.4'} + + call-bound@1.0.4: + resolution: {integrity: sha512-+ys997U96po4Kx/ABpBCqhA9EuxJaQWDQg7295H4hBphv3IZg0boBKuwYpt4YXp6MZ5AmZQnU/tyMTlRpaSejg==} + engines: {node: '>= 0.4'} + + callsites@3.1.0: + resolution: {integrity: sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==} + engines: {node: '>=6'} + + chalk@4.1.2: + resolution: {integrity: sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==} + engines: {node: '>=10'} + + chalk@5.6.2: + resolution: {integrity: 
sha512-7NzBL0rN6fMUW+f7A6Io4h40qQlG+xGmtMxfbnH/K7TAtt8JQWVQK+6g0UXKMeVJoyV5EkkNsErQ8pVD3bLHbA==} + engines: {node: ^12.17.0 || ^14.13 || >=16.0.0} + + cliui@8.0.1: + resolution: {integrity: sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==} + engines: {node: '>=12'} + + cohere-ai@7.20.0: + resolution: {integrity: sha512-h/3h3pcLXRUmkzp/W+/FWViEMcAFtSZ8YayCTFQXpib112uNSj3feApOtJg7v9lreWR1t7gznhE6N9KNCX5FOA==} + engines: {node: '>=18.0.0'} + + color-convert@2.0.1: + resolution: {integrity: sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==} + engines: {node: '>=7.0.0'} + + color-name@1.1.4: + resolution: {integrity: sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==} + + combined-stream@1.0.8: + resolution: {integrity: sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==} + engines: {node: '>= 0.8'} + + concat-map@0.0.1: + resolution: {integrity: sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==} + + confusing-browser-globals@1.0.11: + resolution: {integrity: sha512-JsPKdmh8ZkmnHxDk55FZ1TqVLvEQTvoByJZRN9jzI0UjxK/QgAmsphz7PGtqgPieQZ/CQcHWXCR7ATDNhGe+YA==} + + convict@6.2.4: + resolution: {integrity: sha512-qN60BAwdMVdofckX7AlohVJ2x9UvjTNoKVXCL2LxFk1l7757EJqf1nySdMkPQer0bt8kQ5lQiyZ9/2NvrFBuwQ==} + engines: {node: '>=6'} + + cross-spawn@7.0.6: + resolution: {integrity: sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==} + engines: {node: '>= 8'} + + data-view-buffer@1.0.2: + resolution: {integrity: sha512-EmKO5V3OLXh1rtK2wgXRansaK1/mtVdTUEiEI0W8RkvgT05kfxaH29PliLnpLP73yYO6142Q72QNa8Wx/A5CqQ==} + engines: {node: '>= 0.4'} + + data-view-byte-length@1.0.2: + resolution: {integrity: sha512-tuhGbE6CfTM9+5ANGf+oQb72Ky/0+s3xKUpHvShfiz2RxMFgFPjsXuRLBVMtvMs15awe45SRb83D6wH4ew6wlQ==} + engines: {node: '>= 0.4'} + + 
data-view-byte-offset@1.0.1: + resolution: {integrity: sha512-BS8PfmtDGnrgYdOonGZQdLZslWIeCGFP9tpan0hi1Co2Zr2NKADsvGYA8XxuG/4UWgJ6Cjtv+YJnB6MM69QGlQ==} + engines: {node: '>= 0.4'} + + debug@3.2.7: + resolution: {integrity: sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==} + peerDependencies: + supports-color: '*' + peerDependenciesMeta: + supports-color: + optional: true + + debug@4.4.3: + resolution: {integrity: sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==} + engines: {node: '>=6.0'} + peerDependencies: + supports-color: '*' + peerDependenciesMeta: + supports-color: + optional: true + + deep-is@0.1.4: + resolution: {integrity: sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==} + + define-data-property@1.1.4: + resolution: {integrity: sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A==} + engines: {node: '>= 0.4'} + + define-properties@1.2.1: + resolution: {integrity: sha512-8QmQKqEASLd5nx0U1B1okLElbUuuttJ/AnYmRXbbbGDWh6uS208EjD4Xqq/I9wK7u0v6O08XhTWnt5XtEbR6Dg==} + engines: {node: '>= 0.4'} + + delayed-stream@1.0.0: + resolution: {integrity: sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==} + engines: {node: '>=0.4.0'} + + dequal@2.0.3: + resolution: {integrity: sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA==} + engines: {node: '>=6'} + + diff-match-patch@1.0.5: + resolution: {integrity: sha512-IayShXAgj/QMXgB0IWmKx+rOPuGMhqm5w6jvFxmVenXKIzRqTAAsbBPT3kWQeGANj3jGgvcvv4yK6SxqYmikgw==} + + dir-glob@3.0.1: + resolution: {integrity: sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==} + engines: {node: '>=8'} + + doctrine@2.1.0: + resolution: {integrity: sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw==} 
+ engines: {node: '>=0.10.0'} + + doctrine@3.0.0: + resolution: {integrity: sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==} + engines: {node: '>=6.0.0'} + + dunder-proto@1.0.1: + resolution: {integrity: sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==} + engines: {node: '>= 0.4'} + + duplexify@4.1.3: + resolution: {integrity: sha512-M3BmBhwJRZsSx38lZyhE53Csddgzl5R7xGJNk7CVddZD6CcmwMCH8J+7AprIrQKH7TonKxaCjcv27Qmf+sQ+oA==} + + ecdsa-sig-formatter@1.0.11: + resolution: {integrity: sha512-nagl3RYrbNv6kQkeJIpt6NJZy8twLB/2vtz6yN9Z4vRKHN4/QZJIEbqohALSgwKdnksuY3k5Addp5lg8sVoVcQ==} + + emoji-regex@8.0.0: + resolution: {integrity: sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==} + + end-of-stream@1.4.5: + resolution: {integrity: sha512-ooEGc6HP26xXq/N+GCGOT0JKCLDGrq2bQUZrQ7gyrJiZANJ/8YDTxTpQBXGMn+WbIQXNVpyWymm7KYVICQnyOg==} + + es-abstract@1.24.1: + resolution: {integrity: sha512-zHXBLhP+QehSSbsS9Pt23Gg964240DPd6QCf8WpkqEXxQ7fhdZzYsocOr5u7apWonsS5EjZDmTF+/slGMyasvw==} + engines: {node: '>= 0.4'} + + es-define-property@1.0.1: + resolution: {integrity: sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==} + engines: {node: '>= 0.4'} + + es-errors@1.3.0: + resolution: {integrity: sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==} + engines: {node: '>= 0.4'} + + es-object-atoms@1.1.1: + resolution: {integrity: sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==} + engines: {node: '>= 0.4'} + + es-set-tostringtag@2.1.0: + resolution: {integrity: sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA==} + engines: {node: '>= 0.4'} + + es-shim-unscopables@1.1.0: + resolution: {integrity: 
sha512-d9T8ucsEhh8Bi1woXCf+TIKDIROLG5WCkxg8geBCbvk22kzwC5G2OnXVMO6FUsvQlgUUXQ2itephWDLqDzbeCw==} + engines: {node: '>= 0.4'} + + es-to-primitive@1.3.0: + resolution: {integrity: sha512-w+5mJ3GuFL+NjVtJlvydShqE1eN3h3PbI7/5LAsYJP/2qtuMXjfL2LpHSRqo4b4eSF5K/DH1JXKUAHSB2UW50g==} + engines: {node: '>= 0.4'} + + escalade@3.2.0: + resolution: {integrity: sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==} + engines: {node: '>=6'} + + escape-string-regexp@4.0.0: + resolution: {integrity: sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==} + engines: {node: '>=10'} + + eslint-config-airbnb-base@15.0.0: + resolution: {integrity: sha512-xaX3z4ZZIcFLvh2oUNvcX5oEofXda7giYmuplVxoOg5A7EXJMrUyqRgR+mhDhPK8LZ4PttFOBvCYDbX3sUoUig==} + engines: {node: ^10.12.0 || >=12.0.0} + peerDependencies: + eslint: ^7.32.0 || ^8.2.0 + eslint-plugin-import: ^2.25.2 + + eslint-config-prettier@9.1.2: + resolution: {integrity: sha512-iI1f+D2ViGn+uvv5HuHVUamg8ll4tN+JRHGc6IJi4TP9Kl976C57fzPXgseXNs8v0iA8aSJpHsTWjDb9QJamGQ==} + hasBin: true + peerDependencies: + eslint: '>=7.0.0' + + eslint-import-resolver-node@0.3.9: + resolution: {integrity: sha512-WFj2isz22JahUv+B788TlO3N6zL3nNJGU8CcZbPZvVEkBPaJdCV4vy5wyghty5ROFbCRnm132v8BScu5/1BQ8g==} + + eslint-module-utils@2.12.1: + resolution: {integrity: sha512-L8jSWTze7K2mTg0vos/RuLRS5soomksDPoJLXIslC7c8Wmut3bx7CPpJijDcBZtxQ5lrbUdM+s0OlNbz0DCDNw==} + engines: {node: '>=4'} + peerDependencies: + '@typescript-eslint/parser': '*' + eslint: '*' + eslint-import-resolver-node: '*' + eslint-import-resolver-typescript: '*' + eslint-import-resolver-webpack: '*' + peerDependenciesMeta: + '@typescript-eslint/parser': + optional: true + eslint: + optional: true + eslint-import-resolver-node: + optional: true + eslint-import-resolver-typescript: + optional: true + eslint-import-resolver-webpack: + optional: true + + eslint-plugin-import@2.32.0: + resolution: {integrity: 
sha512-whOE1HFo/qJDyX4SnXzP4N6zOWn79WhnCUY/iDR0mPfQZO8wcYE4JClzI2oZrhBnnMUCBCHZhO6VQyoBU95mZA==} + engines: {node: '>=4'} + peerDependencies: + '@typescript-eslint/parser': '*' + eslint: ^2 || ^3 || ^4 || ^5 || ^6 || ^7.2.0 || ^8 || ^9 + peerDependenciesMeta: + '@typescript-eslint/parser': + optional: true + + eslint-scope@7.2.2: + resolution: {integrity: sha512-dOt21O7lTMhDM+X9mB4GX+DZrZtCUJPL/wlcTqxyrx5IvO0IYtILdtrQGQp+8n5S0gwSVmOf9NQrjMOgfQZlIg==} + engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} + + eslint-visitor-keys@3.4.3: + resolution: {integrity: sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==} + engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} + + eslint@8.57.1: + resolution: {integrity: sha512-ypowyDxpVSYpkXr9WPv2PAZCtNip1Mv5KTW0SCurXv/9iOpcrH9PaqUElksqEB6pChqHGDRCFTyrZlGhnLNGiA==} + engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} + deprecated: This version is no longer supported. Please see https://eslint.org/version-support for other options. 
+ hasBin: true + + espree@9.6.1: + resolution: {integrity: sha512-oruZaFkjorTpF32kDSI5/75ViwGeZginGGy2NoOSg3Q9bnwlnmDm4HLnkl0RE3n+njDXR037aY1+x58Z/zFdwQ==} + engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} + + esquery@1.7.0: + resolution: {integrity: sha512-Ap6G0WQwcU/LHsvLwON1fAQX9Zp0A2Y6Y/cJBl9r/JbW90Zyg4/zbG6zzKa2OTALELarYHmKu0GhpM5EO+7T0g==} + engines: {node: '>=0.10'} + + esrecurse@4.3.0: + resolution: {integrity: sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==} + engines: {node: '>=4.0'} + + estraverse@5.3.0: + resolution: {integrity: sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==} + engines: {node: '>=4.0'} + + esutils@2.0.3: + resolution: {integrity: sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==} + engines: {node: '>=0.10.0'} + + event-target-shim@5.0.1: + resolution: {integrity: sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==} + engines: {node: '>=6'} + + events@3.3.0: + resolution: {integrity: sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==} + engines: {node: '>=0.8.x'} + + extend@3.0.2: + resolution: {integrity: sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==} + + fast-deep-equal@3.1.3: + resolution: {integrity: sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==} + + fast-glob@3.3.3: + resolution: {integrity: sha512-7MptL8U0cqcFdzIzwOTHoilX9x5BrNqye7Z/LuC7kCMRio1EMSyqRK3BEAUD7sXRq4iT4AzTVuZdhgQ2TCvYLg==} + engines: {node: '>=8.6.0'} + + fast-json-stable-stringify@2.1.0: + resolution: {integrity: sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==} + + fast-levenshtein@2.0.6: + resolution: {integrity: 
sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==} + + fast-xml-parser@5.3.6: + resolution: {integrity: sha512-QNI3sAvSvaOiaMl8FYU4trnEzCwiRr8XMWgAHzlrWpTSj+QaCSvOf1h82OEP1s4hiAXhnbXSyFWCf4ldZzZRVA==} + hasBin: true + + fastq@1.20.1: + resolution: {integrity: sha512-GGToxJ/w1x32s/D2EKND7kTil4n8OVk/9mycTc4VDza13lOvpUZTGX3mFSCtV9ksdGBVzvsyAVLM6mHFThxXxw==} + + file-entry-cache@6.0.1: + resolution: {integrity: sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg==} + engines: {node: ^10.12.0 || >=12.0.0} + + fill-range@7.1.1: + resolution: {integrity: sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==} + engines: {node: '>=8'} + + find-up@5.0.0: + resolution: {integrity: sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==} + engines: {node: '>=10'} + + flat-cache@3.2.0: + resolution: {integrity: sha512-CYcENa+FtcUKLmhhqyctpclsq7QF38pKjZHsGNiSQF5r4FtoKDWabFDl3hzaEQMvT1LHEysw5twgLvpYYb4vbw==} + engines: {node: ^10.12.0 || >=12.0.0} + + flatted@3.3.3: + resolution: {integrity: sha512-GX+ysw4PBCz0PzosHDepZGANEuFCMLrnRTiEy9McGjmkCQYwRq4A/X786G/fjM/+OjsWSU1ZrY5qyARZmO/uwg==} + + follow-redirects@1.15.11: + resolution: {integrity: sha512-deG2P0JfjrTxl50XGCDyfI97ZGVCxIpfKYmfyrQ54n5FO/0gfIES8C/Psl6kWVDolizcaaxZJnTS0QSMxvnsBQ==} + engines: {node: '>=4.0'} + peerDependencies: + debug: '*' + peerDependenciesMeta: + debug: + optional: true + + for-each@0.3.5: + resolution: {integrity: sha512-dKx12eRCVIzqCxFGplyFKJMPvLEWgmNtUrpTiJIR5u97zEhRG8ySrtboPHZXx7daLxQVrl643cTzbab2tkQjxg==} + engines: {node: '>= 0.4'} + + form-data-encoder@1.7.2: + resolution: {integrity: sha512-qfqtYan3rxrnCk1VYaA4H+Ms9xdpPqvLZa6xmMgFvhO32x7/3J/ExcTd6qpxM0vH2GdMI+poehyBZvqfMTto8A==} + + form-data-encoder@4.1.0: + resolution: {integrity: sha512-G6NsmEW15s0Uw9XnCg+33H3ViYRyiM0hMrMhhqQOR8NFc5GhYrI+6I3u7OTw7b91J2g8rtvMBZJDbcGb2YUniw==} + 
engines: {node: '>= 18'} + + form-data@2.5.5: + resolution: {integrity: sha512-jqdObeR2rxZZbPSGL+3VckHMYtu+f9//KXBsVny6JSX/pa38Fy+bGjuG8eW/H6USNQWhLi8Num++cU2yOCNz4A==} + engines: {node: '>= 0.12'} + + form-data@4.0.5: + resolution: {integrity: sha512-8RipRLol37bNs2bhoV67fiTEvdTrbMUYcFTiy3+wuuOnUog2QBHCZWXDRijWQfAkhBj2Uf5UnVaiWwA5vdd82w==} + engines: {node: '>= 6'} + + formdata-node@4.4.1: + resolution: {integrity: sha512-0iirZp3uVDjVGt9p49aTaqjk84TrglENEDuqfdlZQ1roC9CWlPk6Avf8EEnZNcAqPonwkG35x4n3ww/1THYAeQ==} + engines: {node: '>= 12.20'} + + formdata-node@6.0.3: + resolution: {integrity: sha512-8e1++BCiTzUno9v5IZ2J6bv4RU+3UKDmqWUQD0MIMVCd9AdhWkO1gw57oo1mNEX1dMq2EGI+FbWz4B92pscSQg==} + engines: {node: '>= 18'} + + fs.realpath@1.0.0: + resolution: {integrity: sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==} + + fsevents@2.3.2: + resolution: {integrity: sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==} + engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0} + os: [darwin] + + function-bind@1.1.2: + resolution: {integrity: sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==} + + function.prototype.name@1.1.8: + resolution: {integrity: sha512-e5iwyodOHhbMr/yNrc7fDYG4qlbIvI5gajyzPnb5TCwyhjApznQh1BMFou9b30SevY43gCJKXycoCBjMbsuW0Q==} + engines: {node: '>= 0.4'} + + functions-have-names@1.2.3: + resolution: {integrity: sha512-xckBUXyTIqT97tq2x2AMb+g163b5JFysYk0x4qxNFwbfQkmNZoiRHb6sPzI9/QV33WeuvVYBUIiD4NzNIyqaRQ==} + + gaxios@6.7.1: + resolution: {integrity: sha512-LDODD4TMYx7XXdpwxAVRAIAuB0bzv0s+ywFonY46k126qzQHT9ygyoa9tncmOiQmmDrik65UYsEkv3lbfqQ3yQ==} + engines: {node: '>=14'} + + gcp-metadata@6.1.1: + resolution: {integrity: sha512-a4tiq7E0/5fTjxPAaH4jpjkSv/uCaU2p5KC6HVGrvl0cDjA8iBZv4vv1gyzlmK0ZUKqwpOyQMKzZQe3lTit77A==} + engines: {node: '>=14'} + + generator-function@2.0.1: + resolution: {integrity: 
sha512-SFdFmIJi+ybC0vjlHN0ZGVGHc3lgE0DxPAT0djjVg+kjOnSqclqmj0KQ7ykTOLP6YxoqOvuAODGdcHJn+43q3g==} + engines: {node: '>= 0.4'} + + get-caller-file@2.0.5: + resolution: {integrity: sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==} + engines: {node: 6.* || 8.* || >= 10.*} + + get-intrinsic@1.3.0: + resolution: {integrity: sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==} + engines: {node: '>= 0.4'} + + get-proto@1.0.1: + resolution: {integrity: sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==} + engines: {node: '>= 0.4'} + + get-symbol-description@1.1.0: + resolution: {integrity: sha512-w9UMqWwJxHNOvoNzSJ2oPF5wvYcvP7jUvYzhp67yEhTi17ZDBBC1z9pTdGuzjD+EFIqLSYRweZjqfiPzQ06Ebg==} + engines: {node: '>= 0.4'} + + glob-parent@5.1.2: + resolution: {integrity: sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==} + engines: {node: '>= 6'} + + glob-parent@6.0.2: + resolution: {integrity: sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==} + engines: {node: '>=10.13.0'} + + glob@7.2.3: + resolution: {integrity: sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==} + deprecated: Old versions of glob are not supported, and contain widely publicized security vulnerabilities, which have been fixed in the current version. Please update. 
Support for old versions may be purchased (at exorbitant rates) by contacting i@izs.me + + globals@13.24.0: + resolution: {integrity: sha512-AhO5QUcj8llrbG09iWhPU2B204J1xnPeL8kQmVorSsy+Sjj1sk8gIyh6cUocGmH4L0UuhAJy+hJMRA4mgA4mFQ==} + engines: {node: '>=8'} + + globalthis@1.0.4: + resolution: {integrity: sha512-DpLKbNU4WylpxJykQujfCcwYWiV/Jhm50Goo0wrVILAv5jOr9d+H+UR3PhSCD2rCCEIg0uc+G+muBTwD54JhDQ==} + engines: {node: '>= 0.4'} + + globby@11.1.0: + resolution: {integrity: sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==} + engines: {node: '>=10'} + + google-auth-library@9.15.1: + resolution: {integrity: sha512-Jb6Z0+nvECVz+2lzSMt9u98UsoakXxA2HGHMCxh+so3n90XgYWkq5dur19JAJV7ONiJY22yBTyJB1TSkvPq9Ng==} + engines: {node: '>=14'} + + google-gax@4.6.1: + resolution: {integrity: sha512-V6eky/xz2mcKfAd1Ioxyd6nmA61gao3n01C+YeuIwu3vzM9EDR6wcVzMSIbLMDXWeoi9SHYctXuKYC5uJUT3eQ==} + engines: {node: '>=14'} + + google-logging-utils@0.0.2: + resolution: {integrity: sha512-NEgUnEcBiP5HrPzufUkBzJOD/Sxsco3rLNo1F1TNf7ieU8ryUzBhqba8r756CjLX7rn3fHl6iLEwPYuqpoKgQQ==} + engines: {node: '>=14'} + + gopd@1.2.0: + resolution: {integrity: sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==} + engines: {node: '>= 0.4'} + + graphemer@1.4.0: + resolution: {integrity: sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag==} + + groq-sdk@0.5.0: + resolution: {integrity: sha512-RVmhW7qZ+XZoy5fIuSdx/LGQJONpL8MHgZEW7dFwTdgkzStub2XQx6OKv28CHogijdwH41J+Npj/z2jBPu3vmw==} + + gtoken@7.1.0: + resolution: {integrity: sha512-pCcEwRi+TKpMlxAQObHDQ56KawURgyAf6jtIY046fJ5tIv3zDe/LEIubckAO8fj6JnAxLdmWkUfNyulQ2iKdEw==} + engines: {node: '>=14.0.0'} + + has-bigints@1.1.0: + resolution: {integrity: sha512-R3pbpkcIqv2Pm3dUwgjclDRVmWpTJW2DcMzcIhEXEx1oh/CEMObMm3KLmRJOdvhM7o4uQBnwr8pzRK2sJWIqfg==} + engines: {node: '>= 0.4'} + + has-flag@4.0.0: + resolution: {integrity: 
sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==} + engines: {node: '>=8'} + + has-property-descriptors@1.0.2: + resolution: {integrity: sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg==} + + has-proto@1.2.0: + resolution: {integrity: sha512-KIL7eQPfHQRC8+XluaIw7BHUwwqL19bQn4hzNgdr+1wXoU0KKj6rufu47lhY7KbJR2C6T6+PfyN0Ea7wkSS+qQ==} + engines: {node: '>= 0.4'} + + has-symbols@1.1.0: + resolution: {integrity: sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==} + engines: {node: '>= 0.4'} + + has-tostringtag@1.0.2: + resolution: {integrity: sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==} + engines: {node: '>= 0.4'} + + hasown@2.0.2: + resolution: {integrity: sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==} + engines: {node: '>= 0.4'} + + http-proxy-agent@5.0.0: + resolution: {integrity: sha512-n2hY8YdoRE1i7r6M0w9DIw5GgZN0G25P8zLCRQ8rjXtTU3vsNFBI/vWK/UIeE6g5MUUz6avwAPXmL6Fy9D/90w==} + engines: {node: '>= 6'} + + https-proxy-agent@5.0.1: + resolution: {integrity: sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA==} + engines: {node: '>= 6'} + + https-proxy-agent@7.0.6: + resolution: {integrity: sha512-vK9P5/iUfdl95AI+JVyUuIcVtd4ofvtrOr3HNtM2yxC9bnMbEdp3x01OhQNnjb8IJYi38VlTE3mBXwcfvywuSw==} + engines: {node: '>= 14'} + + humanize-ms@1.2.1: + resolution: {integrity: sha512-Fl70vYtsAFb/C06PTS9dZBo7ihau+Tu/DNCk/OyHhea07S+aeMWpFFkUaXRa8fI+ScZbEI8dfSxwY7gxZ9SAVQ==} + + idb-keyval@6.2.2: + resolution: {integrity: sha512-yjD9nARJ/jb1g+CvD0tlhUHOrJ9Sy0P8T9MF3YaLlHnSRpwPfpTX0XIvpmw3gAJUmEu3FiICLBDPXVwyEvrleg==} + + ieee754@1.2.1: + resolution: {integrity: sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==} + + ignore@5.3.2: + resolution: {integrity: 
sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g==} + engines: {node: '>= 4'} + + import-fresh@3.3.1: + resolution: {integrity: sha512-TR3KfrTZTYLPB6jUjfx6MF9WcWrHL9su5TObK4ZkYgBdWKPOFoSoQIdEuTuR82pmtxH2spWG9h6etwfr1pLBqQ==} + engines: {node: '>=6'} + + imurmurhash@0.1.4: + resolution: {integrity: sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==} + engines: {node: '>=0.8.19'} + + inflight@1.0.6: + resolution: {integrity: sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==} + deprecated: This module is not supported, and leaks memory. Do not use it. Check out lru-cache if you want a good and tested way to coalesce async requests by a key value, which is much more comprehensive and powerful. + + inherits@2.0.4: + resolution: {integrity: sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==} + + internal-slot@1.1.0: + resolution: {integrity: sha512-4gd7VpWNQNB4UKKCFFVcp1AVv+FMOgs9NKzjHKusc8jTMhd5eL1NqQqOpE0KzMds804/yHlglp3uxgluOqAPLw==} + engines: {node: '>= 0.4'} + + is-array-buffer@3.0.5: + resolution: {integrity: sha512-DDfANUiiG2wC1qawP66qlTugJeL5HyzMpfr8lLK+jMQirGzNod0B12cFB/9q838Ru27sBwfw78/rdoU7RERz6A==} + engines: {node: '>= 0.4'} + + is-async-function@2.1.1: + resolution: {integrity: sha512-9dgM/cZBnNvjzaMYHVoxxfPj2QXt22Ev7SuuPrs+xav0ukGB0S6d4ydZdEiM48kLx5kDV+QBPrpVnFyefL8kkQ==} + engines: {node: '>= 0.4'} + + is-bigint@1.1.0: + resolution: {integrity: sha512-n4ZT37wG78iz03xPRKJrHTdZbe3IicyucEtdRsV5yglwc3GyUfbAfpSeD0FJ41NbUNSt5wbhqfp1fS+BgnvDFQ==} + engines: {node: '>= 0.4'} + + is-boolean-object@1.2.2: + resolution: {integrity: sha512-wa56o2/ElJMYqjCjGkXri7it5FbebW5usLw/nPmCMs5DeZ7eziSYZhSmPRn0txqeW4LnAmQQU7FgqLpsEFKM4A==} + engines: {node: '>= 0.4'} + + is-callable@1.2.7: + resolution: {integrity: 
sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA==} + engines: {node: '>= 0.4'} + + is-core-module@2.16.1: + resolution: {integrity: sha512-UfoeMA6fIJ8wTYFEUjelnaGI67v6+N7qXJEvQuIGa99l4xsCruSYOVSQ0uPANn4dAzm8lkYPaKLrrijLq7x23w==} + engines: {node: '>= 0.4'} + + is-data-view@1.0.2: + resolution: {integrity: sha512-RKtWF8pGmS87i2D6gqQu/l7EYRlVdfzemCJN/P3UOs//x1QE7mfhvzHIApBTRf7axvT6DMGwSwBXYCT0nfB9xw==} + engines: {node: '>= 0.4'} + + is-date-object@1.1.0: + resolution: {integrity: sha512-PwwhEakHVKTdRNVOw+/Gyh0+MzlCl4R6qKvkhuvLtPMggI1WAHt9sOwZxQLSGpUaDnrdyDsomoRgNnCfKNSXXg==} + engines: {node: '>= 0.4'} + + is-electron@2.2.2: + resolution: {integrity: sha512-FO/Rhvz5tuw4MCWkpMzHFKWD2LsfHzIb7i6MdPYZ/KW7AlxawyLkqdy+jPZP1WubqEADE3O4FUENlJHDfQASRg==} + + is-extglob@2.1.1: + resolution: {integrity: sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==} + engines: {node: '>=0.10.0'} + + is-finalizationregistry@1.1.1: + resolution: {integrity: sha512-1pC6N8qWJbWoPtEjgcL2xyhQOP491EQjeUo3qTKcmV8YSDDJrOepfG8pcC7h/QgnQHYSv0mJ3Z/ZWxmatVrysg==} + engines: {node: '>= 0.4'} + + is-fullwidth-code-point@3.0.0: + resolution: {integrity: sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==} + engines: {node: '>=8'} + + is-generator-function@1.1.2: + resolution: {integrity: sha512-upqt1SkGkODW9tsGNG5mtXTXtECizwtS2kA161M+gJPc1xdb/Ax629af6YrTwcOeQHbewrPNlE5Dx7kzvXTizA==} + engines: {node: '>= 0.4'} + + is-glob@4.0.3: + resolution: {integrity: sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==} + engines: {node: '>=0.10.0'} + + is-map@2.0.3: + resolution: {integrity: sha512-1Qed0/Hr2m+YqxnM09CjA2d/i6YZNfF6R2oRAOj36eUdS6qIV/huPJNSEpKbupewFs+ZsJlxsjjPbc0/afW6Lw==} + engines: {node: '>= 0.4'} + + is-negative-zero@2.0.3: + resolution: {integrity: 
sha512-5KoIu2Ngpyek75jXodFvnafB6DJgr3u8uuK0LEZJjrU19DrMD3EVERaR8sjz8CCGgpZvxPl9SuE1GMVPFHx1mw==} + engines: {node: '>= 0.4'} + + is-number-object@1.1.1: + resolution: {integrity: sha512-lZhclumE1G6VYD8VHe35wFaIif+CTy5SJIi5+3y4psDgWu4wPDoBhF8NxUOinEc7pHgiTsT6MaBb92rKhhD+Xw==} + engines: {node: '>= 0.4'} + + is-number@7.0.0: + resolution: {integrity: sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==} + engines: {node: '>=0.12.0'} + + is-path-inside@3.0.3: + resolution: {integrity: sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ==} + engines: {node: '>=8'} + + is-regex@1.2.1: + resolution: {integrity: sha512-MjYsKHO5O7mCsmRGxWcLWheFqN9DJ/2TmngvjKXihe6efViPqc274+Fx/4fYj/r03+ESvBdTXK0V6tA3rgez1g==} + engines: {node: '>= 0.4'} + + is-set@2.0.3: + resolution: {integrity: sha512-iPAjerrse27/ygGLxw+EBR9agv9Y6uLeYVJMu+QNCoouJ1/1ri0mGrcWpfCqFZuzzx3WjtwxG098X+n4OuRkPg==} + engines: {node: '>= 0.4'} + + is-shared-array-buffer@1.0.4: + resolution: {integrity: sha512-ISWac8drv4ZGfwKl5slpHG9OwPNty4jOWPRIhBpxOoD+hqITiwuipOQ2bNthAzwA3B4fIjO4Nln74N0S9byq8A==} + engines: {node: '>= 0.4'} + + is-stream@2.0.1: + resolution: {integrity: sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==} + engines: {node: '>=8'} + + is-string@1.1.1: + resolution: {integrity: sha512-BtEeSsoaQjlSPBemMQIrY1MY0uM6vnS1g5fmufYOtnxLGUZM2178PKbhsk7Ffv58IX+ZtcvoGwccYsh0PglkAA==} + engines: {node: '>= 0.4'} + + is-symbol@1.1.1: + resolution: {integrity: sha512-9gGx6GTtCQM73BgmHQXfDmLtfjjTUDSyoxTCbp5WtoixAhfgsDirWIcVQ/IHpvI5Vgd5i/J5F7B9cN/WlVbC/w==} + engines: {node: '>= 0.4'} + + is-typed-array@1.1.15: + resolution: {integrity: sha512-p3EcsicXjit7SaskXHs1hA91QxgTw46Fv6EFKKGS5DRFLD8yKnohjF3hxoju94b/OcMZoQukzpPpBE9uLVKzgQ==} + engines: {node: '>= 0.4'} + + is-url@1.2.4: + resolution: {integrity: 
sha512-ITvGim8FhRiYe4IQ5uHSkj7pVaPDrCTkNd3yq3cV7iZAcJdHTUMPMEHcqSOy9xZ9qFenQCvi+2wjH9a1nXqHww==} + + is-weakmap@2.0.2: + resolution: {integrity: sha512-K5pXYOm9wqY1RgjpL3YTkF39tni1XajUIkawTLUo9EZEVUFga5gSQJF8nNS7ZwJQ02y+1YCNYcMh+HIf1ZqE+w==} + engines: {node: '>= 0.4'} + + is-weakref@1.1.1: + resolution: {integrity: sha512-6i9mGWSlqzNMEqpCp93KwRS1uUOodk2OJ6b+sq7ZPDSy2WuI5NFIxp/254TytR8ftefexkWn5xNiHUNpPOfSew==} + engines: {node: '>= 0.4'} + + is-weakset@2.0.4: + resolution: {integrity: sha512-mfcwb6IzQyOKTs84CQMrOwW4gQcaTOAWJ0zzJCl2WSPDrWk/OzDaImWFH3djXhb24g4eudZfLRozAvPGw4d9hQ==} + engines: {node: '>= 0.4'} + + is@3.3.2: + resolution: {integrity: sha512-a2xr4E3s1PjDS8ORcGgXpWx6V+liNs+O3JRD2mb9aeugD7rtkkZ0zgLdYgw0tWsKhsdiezGYptSiMlVazCBTuQ==} + engines: {node: '>= 0.4'} + + isarray@2.0.5: + resolution: {integrity: sha512-xHjhDr3cNBK0BzdUJSPXZntQUx/mwMS5Rw4A7lPJ90XGAO6ISP/ePDNuo0vhqOZU+UD5JoodwCAAoZQd3FeAKw==} + + isexe@2.0.0: + resolution: {integrity: sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==} + + js-yaml@4.1.1: + resolution: {integrity: sha512-qQKT4zQxXl8lLwBtHMWwaTcGfFOZviOJet3Oy/xmGk2gZH677CJM9EvtfdSkgWcATZhj/55JZ0rmy3myCT5lsA==} + hasBin: true + + json-bigint@1.0.0: + resolution: {integrity: sha512-SiPv/8VpZuWbvLSMtTDU8hEfrZWg/mH/nV/b4o0CYbSxu1UIQPLdwKOCIyLQX+VIPO5vrLX3i8qtqFyhdPSUSQ==} + + json-buffer@3.0.1: + resolution: {integrity: sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==} + + json-schema-traverse@0.4.1: + resolution: {integrity: sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==} + + json-schema@0.4.0: + resolution: {integrity: sha512-es94M3nTIfsEPisRafak+HDLfHXnKBhV3vU5eqPcS3flIWqcxJWgXHXiey3YrpaNsanY5ei1VoYEbOzijuq9BA==} + + json-stable-stringify-without-jsonify@1.0.1: + resolution: {integrity: sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==} + + 
json5@1.0.2: + resolution: {integrity: sha512-g1MWMLBiz8FKi1e4w0UyVL3w+iJceWAFBAaBnnGKOpNa5f8TLktkbre1+s6oICydWAm+HRUGTmI+//xv2hvXYA==} + hasBin: true + + jsondiffpatch@0.6.0: + resolution: {integrity: sha512-3QItJOXp2AP1uv7waBkao5nCvhEv+QmJAd38Ybq7wNI74Q+BBmnLn4EDKz6yI9xGAIQoUF87qHt+kc1IVxB4zQ==} + engines: {node: ^18.0.0 || >=20.0.0} + hasBin: true + + jwa@2.0.1: + resolution: {integrity: sha512-hRF04fqJIP8Abbkq5NKGN0Bbr3JxlQ+qhZufXVr0DvujKy93ZCbXZMHDL4EOtodSbCWxOqR8MS1tXA5hwqCXDg==} + + jws@4.0.1: + resolution: {integrity: sha512-EKI/M/yqPncGUUh44xz0PxSidXFr/+r0pA70+gIYhjv+et7yxM+s29Y+VGDkovRofQem0fs7Uvf4+YmAdyRduA==} + + keyv@4.5.4: + resolution: {integrity: sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw==} + + levn@0.4.1: + resolution: {integrity: sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==} + engines: {node: '>= 0.8.0'} + + locate-path@6.0.0: + resolution: {integrity: sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==} + engines: {node: '>=10'} + + lodash.camelcase@4.3.0: + resolution: {integrity: sha512-TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA==} + + lodash.clonedeep@4.5.0: + resolution: {integrity: sha512-H5ZhCF25riFd9uB5UCkVKo61m3S/xZk1x4wA6yp/L3RFP6Z/eHH1ymQcGLo7J3GMPfm0V/7m1tryHuGVxpqEBQ==} + + lodash.merge@4.6.2: + resolution: {integrity: sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==} + + long@5.3.2: + resolution: {integrity: sha512-mNAgZ1GmyNhD7AuqnTG3/VQ26o760+ZYBPKjPvugO8+nLbYfX6TVpJPseBvopbdY+qpZ/lKUnmEc1LeZYS3QAA==} + + math-intrinsics@1.1.0: + resolution: {integrity: sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==} + engines: {node: '>= 0.4'} + + merge2@1.4.1: + resolution: {integrity: 
sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==} + engines: {node: '>= 8'} + + micromatch@4.0.8: + resolution: {integrity: sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==} + engines: {node: '>=8.6'} + + mime-db@1.52.0: + resolution: {integrity: sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==} + engines: {node: '>= 0.6'} + + mime-types@2.1.35: + resolution: {integrity: sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==} + engines: {node: '>= 0.6'} + + minimatch@3.1.5: + resolution: {integrity: sha512-VgjWUsnnT6n+NUk6eZq77zeFdpW2LWDzP6zFGrCbHXiYNul5Dzqk2HHQ5uFH2DNW5Xbp8+jVzaeNt94ssEEl4w==} + + minimatch@9.0.3: + resolution: {integrity: sha512-RHiac9mvaRw0x3AYRgDC1CxAP7HTcNrrECeA8YYJeWnpo+2Q5CegtZjaotWTWxDG3UeGA1coE05iH1mPjT/2mg==} + engines: {node: '>=16 || 14 >=14.17'} + + minimist@1.2.8: + resolution: {integrity: sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==} + + ms@2.1.3: + resolution: {integrity: sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==} + + nanoid@3.3.11: + resolution: {integrity: sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==} + engines: {node: ^10 || ^12 || ^13.7 || ^14 || >=15.0.1} + hasBin: true + + natural-compare@1.4.0: + resolution: {integrity: sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==} + + nice-grpc-common@2.0.2: + resolution: {integrity: sha512-7RNWbls5kAL1QVUOXvBsv1uO0wPQK3lHv+cY1gwkTzirnG1Nop4cBJZubpgziNbaVc/bl9QJcyvsf/NQxa3rjQ==} + + nice-grpc@2.1.14: + resolution: {integrity: sha512-GK9pKNxlvnU5FAdaw7i2FFuR9CqBspcE+if2tqnKXBcE0R8525wj4BZvfcwj7FjvqbssqKxRHt2nwedalbJlww==} + + node-domexception@1.0.0: + resolution: {integrity: 
sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==} + engines: {node: '>=10.5.0'} + deprecated: Use your platform's native DOMException instead + + node-fetch@2.7.0: + resolution: {integrity: sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==} + engines: {node: 4.x || >=6.0.0} + peerDependencies: + encoding: ^0.1.0 + peerDependenciesMeta: + encoding: + optional: true + + object-hash@3.0.0: + resolution: {integrity: sha512-RSn9F68PjH9HqtltsSnqYC1XXoWe9Bju5+213R98cNGttag9q9yAOTzdbsqvIa7aNm5WffBZFpWYr2aWrklWAw==} + engines: {node: '>= 6'} + + object-inspect@1.13.4: + resolution: {integrity: sha512-W67iLl4J2EXEGTbfeHCffrjDfitvLANg0UlX3wFUUSTx92KXRFegMHUVgSqE+wvhAbi4WqjGg9czysTV2Epbew==} + engines: {node: '>= 0.4'} + + object-keys@1.1.1: + resolution: {integrity: sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==} + engines: {node: '>= 0.4'} + + object.assign@4.1.7: + resolution: {integrity: sha512-nK28WOo+QIjBkDduTINE4JkF/UJJKyf2EJxvJKfblDpyg0Q+pkOHNTL0Qwy6NP6FhE/EnzV73BxxqcJaXY9anw==} + engines: {node: '>= 0.4'} + + object.entries@1.1.9: + resolution: {integrity: sha512-8u/hfXFRBD1O0hPUjioLhoWFHRmt6tKA4/vZPyckBr18l1KE9uHrFaFaUi8MDRTpi4uak2goyPTSNJLXX2k2Hw==} + engines: {node: '>= 0.4'} + + object.fromentries@2.0.8: + resolution: {integrity: sha512-k6E21FzySsSK5a21KRADBd/NGneRegFO5pLHfdQLpRDETUNJueLXs3WCzyQ3tFRDYgbq3KHGXfTbi2bs8WQ6rQ==} + engines: {node: '>= 0.4'} + + object.groupby@1.0.3: + resolution: {integrity: sha512-+Lhy3TQTuzXI5hevh8sBGqbmurHbbIjAi0Z4S63nthVLmLxfbj4T54a4CfZrXIrt9iP4mVAPYMo/v99taj3wjQ==} + engines: {node: '>= 0.4'} + + object.values@1.2.1: + resolution: {integrity: sha512-gXah6aZrcUxjWg2zR2MwouP2eHlCBzdV4pygudehaKXSGW4v2AsRQUK+lwwXhii6KFZcunEnmSUoYp5CXibxtA==} + engines: {node: '>= 0.4'} + + once@1.4.0: + resolution: {integrity: 
sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==} + + openai@4.104.0: + resolution: {integrity: sha512-p99EFNsA/yX6UhVO93f5kJsDRLAg+CTA2RBqdHK4RtK8u5IJw32Hyb2dTGKbnnFmnuoBv5r7Z2CURI9sGZpSuA==} + hasBin: true + peerDependencies: + ws: ^8.18.0 + zod: ^3.23.8 + peerDependenciesMeta: + ws: + optional: true + zod: + optional: true + + opencollective-postinstall@2.0.3: + resolution: {integrity: sha512-8AV/sCtuzUeTo8gQK5qDZzARrulB3egtLzFgteqB2tcT4Mw7B8Kt7JcDHmltjz6FOAHsvTevk70gZEbhM4ZS9Q==} + hasBin: true + + optionator@0.9.4: + resolution: {integrity: sha512-6IpQ7mKUxRcZNLIObR0hz7lxsapSSIYNZJwXPGeF0mTVqGKFIXj1DQcMoT22S3ROcLyY/rz0PWaWZ9ayWmad9g==} + engines: {node: '>= 0.8.0'} + + own-keys@1.0.1: + resolution: {integrity: sha512-qFOyK5PjiWZd+QQIh+1jhdb9LpxTF0qs7Pm8o5QHYZ0M3vKqSqzsZaEB6oWlxZ+q2sJBMI/Ktgd2N5ZwQoRHfg==} + engines: {node: '>= 0.4'} + + p-limit@3.1.0: + resolution: {integrity: sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==} + engines: {node: '>=10'} + + p-locate@5.0.0: + resolution: {integrity: sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==} + engines: {node: '>=10'} + + parent-module@1.0.1: + resolution: {integrity: sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==} + engines: {node: '>=6'} + + path-exists@4.0.0: + resolution: {integrity: sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==} + engines: {node: '>=8'} + + path-is-absolute@1.0.1: + resolution: {integrity: sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==} + engines: {node: '>=0.10.0'} + + path-key@3.1.1: + resolution: {integrity: sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==} + engines: {node: '>=8'} + + path-parse@1.0.7: + resolution: {integrity: 
sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==} + + path-type@4.0.0: + resolution: {integrity: sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==} + engines: {node: '>=8'} + + picomatch@2.3.1: + resolution: {integrity: sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==} + engines: {node: '>=8.6'} + + playwright-core@1.58.2: + resolution: {integrity: sha512-yZkEtftgwS8CsfYo7nm0KE8jsvm6i/PTgVtB8DL726wNf6H2IMsDuxCpJj59KDaxCtSnrWan2AeDqM7JBaultg==} + engines: {node: '>=18'} + hasBin: true + + playwright@1.58.2: + resolution: {integrity: sha512-vA30H8Nvkq/cPBnNw4Q8TWz1EJyqgpuinBcHET0YVJVFldr8JDNiU9LaWAE1KqSkRYazuaBhTpB5ZzShOezQ6A==} + engines: {node: '>=18'} + hasBin: true + + possible-typed-array-names@1.1.0: + resolution: {integrity: sha512-/+5VFTchJDoVj3bhoqi6UeymcD00DAwb1nJwamzPvHEszJ4FpF6SNNbUbOS8yI56qHzdV8eK0qEfOSiodkTdxg==} + engines: {node: '>= 0.4'} + + prelude-ls@1.2.1: + resolution: {integrity: sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==} + engines: {node: '>= 0.8.0'} + + process@0.11.10: + resolution: {integrity: sha512-cdGef/drWFoydD1JsMzuFf8100nZl+GT+yacc2bEced5f9Rjk4z+WtFUTBu9PhOi9j/jfmBPu0mMEY4wIdAF8A==} + engines: {node: '>= 0.6.0'} + + prom-client@15.1.3: + resolution: {integrity: sha512-6ZiOBfCywsD4k1BN9IX0uZhF+tJkV8q8llP64G5Hajs4JOeVLPCwpPVcpXy3BwYiUGgyJzsJJQeOIv7+hDSq8g==} + engines: {node: ^16 || ^18 || >=20} + + proto3-json-serializer@2.0.2: + resolution: {integrity: sha512-SAzp/O4Yh02jGdRc+uIrGoe87dkN/XtwxfZ4ZyafJHymd79ozp5VG5nyZ7ygqPM5+cpLDjjGnYFUkngonyDPOQ==} + engines: {node: '>=14.0.0'} + + protobufjs@7.5.4: + resolution: {integrity: sha512-CvexbZtbov6jW2eXAvLukXjXUW1TzFaivC46BpWc/3BpcCysb5Vffu+B3XHMm8lVEuy2Mm4XGex8hBSg1yapPg==} + engines: {node: '>=12.0.0'} + + proxy-from-env@1.1.0: + resolution: {integrity: 
sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==} + + punycode@2.3.1: + resolution: {integrity: sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==} + engines: {node: '>=6'} + + qs@6.15.0: + resolution: {integrity: sha512-mAZTtNCeetKMH+pSjrb76NAM8V9a05I9aBZOHztWy/UqcJdQYNsf59vrRKWnojAT9Y+GbIvoTBC++CPHqpDBhQ==} + engines: {node: '>=0.6'} + + queue-microtask@1.2.3: + resolution: {integrity: sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==} + + react@19.2.4: + resolution: {integrity: sha512-9nfp2hYpCwOjAN+8TZFGhtWEwgvWHXqESH8qT89AT/lWklpLON22Lc8pEtnpsZz7VmawabSU0gCjnj8aC0euHQ==} + engines: {node: '>=0.10.0'} + + readable-stream@3.6.2: + resolution: {integrity: sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==} + engines: {node: '>= 6'} + + readable-stream@4.7.0: + resolution: {integrity: sha512-oIGGmcpTLwPga8Bn6/Z75SVaH1z5dUut2ibSyAMVhmUggWpmDn2dapB0n7f8nwaSiRtepAsfJyfXIO5DCVAODg==} + engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} + + reflect.getprototypeof@1.0.10: + resolution: {integrity: sha512-00o4I+DVrefhv+nX0ulyi3biSHCPDe+yLv5o/p6d/UVlirijB8E16FtfwSAi4g3tcqrQ4lRAqQSoFEZJehYEcw==} + engines: {node: '>= 0.4'} + + regenerator-runtime@0.13.11: + resolution: {integrity: sha512-kY1AZVr2Ra+t+piVaJ4gxaFaReZVH40AKNo7UCX6W+dEwBo/2oZJzqfuN1qLq1oL45o56cPaTXELwrTh8Fpggg==} + + regexp.prototype.flags@1.5.4: + resolution: {integrity: sha512-dYqgNSZbDwkaJ2ceRd9ojCGjBq+mOm9LmtXnAnEGyHhN/5R7iDW2TRw3h+o/jCFxus3P2LfWIIiwowAjANm7IA==} + engines: {node: '>= 0.4'} + + require-directory@2.1.1: + resolution: {integrity: sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==} + engines: {node: '>=0.10.0'} + + resolve-from@4.0.0: + resolution: {integrity: sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==} + engines: 
{node: '>=4'} + + resolve@1.22.11: + resolution: {integrity: sha512-RfqAvLnMl313r7c9oclB1HhUEAezcpLjz95wFH4LVuhk9JF/r22qmVP9AMmOU4vMX7Q8pN8jwNg/CSpdFnMjTQ==} + engines: {node: '>= 0.4'} + hasBin: true + + retry-request@7.0.2: + resolution: {integrity: sha512-dUOvLMJ0/JJYEn8NrpOaGNE7X3vpI5XlZS/u0ANjqtcZVKnIxP7IgCFwrKTxENw29emmwug53awKtaMm4i9g5w==} + engines: {node: '>=14'} + + reusify@1.1.0: + resolution: {integrity: sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw==} + engines: {iojs: '>=1.0.0', node: '>=0.10.0'} + + rimraf@3.0.2: + resolution: {integrity: sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==} + deprecated: Rimraf versions prior to v4 are no longer supported + hasBin: true + + run-parallel@1.2.0: + resolution: {integrity: sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==} + + safe-array-concat@1.1.3: + resolution: {integrity: sha512-AURm5f0jYEOydBj7VQlVvDrjeFgthDdEF5H1dP+6mNpoXOMo1quQqJ4wvJDyRZ9+pO3kGWoOdmV08cSv2aJV6Q==} + engines: {node: '>=0.4'} + + safe-buffer@5.2.1: + resolution: {integrity: sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==} + + safe-push-apply@1.0.0: + resolution: {integrity: sha512-iKE9w/Z7xCzUMIZqdBsp6pEQvwuEebH4vdpjcDWnyzaI6yl6O9FHvVpmGelvEHNsoY6wGblkxR6Zty/h00WiSA==} + engines: {node: '>= 0.4'} + + safe-regex-test@1.1.0: + resolution: {integrity: sha512-x/+Cz4YrimQxQccJf5mKEbIa1NzeCRNI5Ecl/ekmlYaampdNLPalVyIcCZNNH3MvmqBugV5TMYZXv0ljslUlaw==} + engines: {node: '>= 0.4'} + + secure-json-parse@2.7.0: + resolution: {integrity: sha512-6aU+Rwsezw7VR8/nyvKTx8QpWH9FrcYiXXlqC4z5d5XQBDRqtbfsRjnwGyqbi3gddNtWHuEk9OANUotL26qKUw==} + + semver@6.3.1: + resolution: {integrity: sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==} + hasBin: true + + semver@7.7.4: + resolution: {integrity: 
sha512-vFKC2IEtQnVhpT78h1Yp8wzwrf8CM+MzKMHGJZfBtzhZNycRFnXsHk6E5TxIkkMsgNS7mdX3AGB7x2QM2di4lA==} + engines: {node: '>=10'} + hasBin: true + + set-function-length@1.2.2: + resolution: {integrity: sha512-pgRc4hJ4/sNjWCSS9AmnS40x3bNMDTknHgL5UaMBTMyJnU90EgWh1Rz+MC9eFu4BuN/UwZjKQuY/1v3rM7HMfg==} + engines: {node: '>= 0.4'} + + set-function-name@2.0.2: + resolution: {integrity: sha512-7PGFlmtwsEADb0WYyvCMa1t+yke6daIG4Wirafur5kcf+MhUnPms1UeR0CKQdTZD81yESwMHbtn+TR+dMviakQ==} + engines: {node: '>= 0.4'} + + set-proto@1.0.0: + resolution: {integrity: sha512-RJRdvCo6IAnPdsvP/7m6bsQqNnn1FCBX5ZNtFL98MmFF/4xAIJTIg1YbHW5DC2W5SKZanrC6i4HsJqlajw/dZw==} + engines: {node: '>= 0.4'} + + shebang-command@2.0.0: + resolution: {integrity: sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==} + engines: {node: '>=8'} + + shebang-regex@3.0.0: + resolution: {integrity: sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==} + engines: {node: '>=8'} + + side-channel-list@1.0.0: + resolution: {integrity: sha512-FCLHtRD/gnpCiCHEiJLOwdmFP+wzCmDEkc9y7NsYxeF4u7Btsn1ZuwgwJGxImImHicJArLP4R0yX4c2KCrMrTA==} + engines: {node: '>= 0.4'} + + side-channel-map@1.0.1: + resolution: {integrity: sha512-VCjCNfgMsby3tTdo02nbjtM/ewra6jPHmpThenkTYh8pG9ucZ/1P8So4u4FGBek/BjpOVsDCMoLA/iuBKIFXRA==} + engines: {node: '>= 0.4'} + + side-channel-weakmap@1.0.2: + resolution: {integrity: sha512-WPS/HvHQTYnHisLo9McqBHOJk2FkHO/tlpvldyrnem4aeQp4hai3gythswg6p01oSoTl58rcpiFAjF2br2Ak2A==} + engines: {node: '>= 0.4'} + + side-channel@1.1.0: + resolution: {integrity: sha512-ZX99e6tRweoUXqR+VBrslhda51Nh5MTQwou5tnUDgbtyM0dBgmhEDtWGP/xbKn6hqfPRHujUNwz5fy/wbbhnpw==} + engines: {node: '>= 0.4'} + + slash@3.0.0: + resolution: {integrity: sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==} + engines: {node: '>=8'} + + stop-iteration-iterator@1.1.0: + resolution: {integrity: 
sha512-eLoXW/DHyl62zxY4SCaIgnRhuMr6ri4juEYARS8E6sCEqzKpOiE521Ucofdx+KnDZl5xmvGYaaKCk5FEOxJCoQ==} + engines: {node: '>= 0.4'} + + stream-events@1.0.5: + resolution: {integrity: sha512-E1GUzBSgvct8Jsb3v2X15pjzN1tYebtbLaMg+eBOUOAxgbLoSbT2NS91ckc5lJD1KfLjId+jXJRgo0qnV5Nerg==} + + stream-shift@1.0.3: + resolution: {integrity: sha512-76ORR0DO1o1hlKwTbi/DM3EXWGf3ZJYO8cXX5RJwnul2DEg2oyoZyjLNoQM8WsvZiFKCRfC1O0J7iCvie3RZmQ==} + + string-width@4.2.3: + resolution: {integrity: sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==} + engines: {node: '>=8'} + + string.prototype.trim@1.2.10: + resolution: {integrity: sha512-Rs66F0P/1kedk5lyYyH9uBzuiI/kNRmwJAR9quK6VOtIpZ2G+hMZd+HQbbv25MgCA6gEffoMZYxlTod4WcdrKA==} + engines: {node: '>= 0.4'} + + string.prototype.trimend@1.0.9: + resolution: {integrity: sha512-G7Ok5C6E/j4SGfyLCloXTrngQIQU3PWtXGst3yM7Bea9FRURf1S42ZHlZZtsNque2FN2PoUhfZXYLNWwEr4dLQ==} + engines: {node: '>= 0.4'} + + string.prototype.trimstart@1.0.8: + resolution: {integrity: sha512-UXSH262CSZY1tfu3G3Secr6uGLCFVPMhIqHjlgCUtCCcgihYc/xKs9djMTMUOb2j1mVSeU8EU6NWc/iQKU6Gfg==} + engines: {node: '>= 0.4'} + + string_decoder@1.3.0: + resolution: {integrity: sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==} + + strip-ansi@6.0.1: + resolution: {integrity: sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==} + engines: {node: '>=8'} + + strip-bom@3.0.0: + resolution: {integrity: sha512-vavAMRXOgBVNF6nyEEmL3DBK19iRpDcoIwW+swQ+CbGiu7lju6t+JklA1MHweoWtadgt4ISVUsXLyDq34ddcwA==} + engines: {node: '>=4'} + + strip-json-comments@3.1.1: + resolution: {integrity: sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==} + engines: {node: '>=8'} + + strnum@2.2.0: + resolution: {integrity: sha512-Y7Bj8XyJxnPAORMZj/xltsfo55uOiyHcU2tnAVzHUnSJR/KsEX+9RoDeXEnsXtl/CX4fAcrt64gZ13aGaWPeBg==} + + stubs@3.0.0: + resolution: 
{integrity: sha512-PdHt7hHUJKxvTCgbKX9C1V/ftOcjJQgz8BZwNfV5c4B6dcGqlpelTbJ999jBGZ2jYiPAwcX5dP6oBwVlBlUbxw==} + + supports-color@7.2.0: + resolution: {integrity: sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==} + engines: {node: '>=8'} + + supports-preserve-symlinks-flag@1.0.0: + resolution: {integrity: sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==} + engines: {node: '>= 0.4'} + + swr@2.4.1: + resolution: {integrity: sha512-2CC6CiKQtEwaEeNiqWTAw9PGykW8SR5zZX8MZk6TeAvEAnVS7Visz8WzphqgtQ8v2xz/4Q5K+j+SeMaKXeeQIA==} + peerDependencies: + react: ^16.11.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 + + tdigest@0.1.2: + resolution: {integrity: sha512-+G0LLgjjo9BZX2MfdvPfH+MKLCrxlXSYec5DaPYP1fe6Iyhf0/fSmJ0bFiZ1F8BT6cGXl2LpltQptzjXKWEkKA==} + + teeny-request@9.0.0: + resolution: {integrity: sha512-resvxdc6Mgb7YEThw6G6bExlXKkv6+YbuzGg9xuXxSgxJF7Ozs+o8Y9+2R3sArdWdW8nOokoQb1yrpFB0pQK2g==} + engines: {node: '>=14'} + + tesseract.js-core@5.1.1: + resolution: {integrity: sha512-KX3bYSU5iGcO1XJa+QGPbi+Zjo2qq6eBhNjSGR5E5q0JtzkoipJKOUQD7ph8kFyteCEfEQ0maWLu8MCXtvX5uQ==} + + tesseract.js@5.1.1: + resolution: {integrity: sha512-lzVl/Ar3P3zhpUT31NjqeCo1f+D5+YfpZ5J62eo2S14QNVOmHBTtbchHm/YAbOOOzCegFnKf4B3Qih9LuldcYQ==} + + text-table@0.2.0: + resolution: {integrity: sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw==} + + throttleit@2.1.0: + resolution: {integrity: sha512-nt6AMGKW1p/70DF/hGBdJB57B8Tspmbp5gfJ8ilhLnt7kkr2ye7hzD6NVG8GGErk2HWF34igrL2CXmNIkzKqKw==} + engines: {node: '>=18'} + + to-regex-range@5.0.1: + resolution: {integrity: sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==} + engines: {node: '>=8.0'} + + tr46@0.0.3: + resolution: {integrity: sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==} + + ts-api-utils@1.4.3: + resolution: {integrity: 
sha512-i3eMG77UTMD0hZhgRS562pv83RC6ukSAC2GMNWc+9dieh/+jDM5u5YG+NHX6VNDRHQcHwmsTHctP9LhbC3WxVw==} + engines: {node: '>=16'} + peerDependencies: + typescript: '>=4.2.0' + + ts-error@1.0.6: + resolution: {integrity: sha512-tLJxacIQUM82IR7JO1UUkKlYuUTmoY9HBJAmNWFzheSlDS5SPMcNIepejHJa4BpPQLAcbRhRf3GDJzyj6rbKvA==} + + tsconfig-paths@3.15.0: + resolution: {integrity: sha512-2Ac2RgzDe/cn48GvOe3M+o82pEFewD3UPbyoUHHdKasHwJKjds4fLXWf/Ux5kATBKN20oaFGu+jbElp1pos0mg==} + + tslib@2.8.1: + resolution: {integrity: sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==} + + type-check@0.4.0: + resolution: {integrity: sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==} + engines: {node: '>= 0.8.0'} + + type-fest@0.20.2: + resolution: {integrity: sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==} + engines: {node: '>=10'} + + typed-array-buffer@1.0.3: + resolution: {integrity: sha512-nAYYwfY3qnzX30IkA6AQZjVbtK6duGontcQm1WSG1MD94YLqK0515GNApXkoxKOWMusVssAHWLh9SeaoefYFGw==} + engines: {node: '>= 0.4'} + + typed-array-byte-length@1.0.3: + resolution: {integrity: sha512-BaXgOuIxz8n8pIq3e7Atg/7s+DpiYrxn4vdot3w9KbnBhcRQq6o3xemQdIfynqSeXeDrF32x+WvfzmOjPiY9lg==} + engines: {node: '>= 0.4'} + + typed-array-byte-offset@1.0.4: + resolution: {integrity: sha512-bTlAFB/FBYMcuX81gbL4OcpH5PmlFHqlCCpAl8AlEzMz5k53oNDvN8p1PNOWLEmI2x4orp3raOFB51tv9X+MFQ==} + engines: {node: '>= 0.4'} + + typed-array-length@1.0.7: + resolution: {integrity: sha512-3KS2b+kL7fsuk/eJZ7EQdnEmQoaho/r6KUef7hxvltNA5DR8NAUM+8wJMbJyZ4G9/7i3v5zPBIMN5aybAh2/Jg==} + engines: {node: '>= 0.4'} + + typescript-event-target@1.1.2: + resolution: {integrity: sha512-TvkrTUpv7gCPlcnSoEwUVUBwsdheKm+HF5u2tPAKubkIGMfovdSizCTaZRY/NhR8+Ijy8iZZUapbVQAsNrkFrw==} + + typescript@5.9.3: + resolution: {integrity: sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==} + engines: 
{node: '>=14.17'} + hasBin: true + + unbox-primitive@1.1.0: + resolution: {integrity: sha512-nWJ91DjeOkej/TA8pXQ3myruKpKEYgqvpw9lz4OPHj/NWFNluYrjbz9j01CJ8yKQd2g4jFoOkINCTW2I5LEEyw==} + engines: {node: '>= 0.4'} + + undici-types@5.26.5: + resolution: {integrity: sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==} + + undici-types@6.21.0: + resolution: {integrity: sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==} + + uri-js@4.4.1: + resolution: {integrity: sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==} + + use-sync-external-store@1.6.0: + resolution: {integrity: sha512-Pp6GSwGP/NrPIrxVFAIkOQeyw8lFenOHijQWkUTrDvrF4ALqylP2C/KCkeS9dpUM3KvYRQhna5vt7IL95+ZQ9w==} + peerDependencies: + react: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 + + util-deprecate@1.0.2: + resolution: {integrity: sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==} + + uuid@9.0.1: + resolution: {integrity: sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA==} + hasBin: true + + wasm-feature-detect@1.8.0: + resolution: {integrity: sha512-zksaLKM2fVlnB5jQQDqKXXwYHLQUVH9es+5TOOHwGOVJOCeRBCiPjwSg+3tN2AdTCzjgli4jijCH290kXb/zWQ==} + + web-streams-polyfill@3.3.3: + resolution: {integrity: sha512-d2JWLCivmZYTSIoge9MsgFCZrt571BikcWGYkjC1khllbTeDlGqZ2D8vD8E/lJa8WGWbb7Plm8/XJYV7IJHZZw==} + engines: {node: '>= 8'} + + web-streams-polyfill@4.0.0-beta.3: + resolution: {integrity: sha512-QW95TCTaHmsYfHDybGMwO5IJIM93I/6vTRk+daHTWFPhwh+C8Cg7j7XyKrwrj8Ib6vYXe0ocYNrmzY4xAAN6ug==} + engines: {node: '>= 14'} + + webidl-conversions@3.0.1: + resolution: {integrity: sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==} + + whatwg-url@5.0.0: + resolution: {integrity: 
sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==} + + which-boxed-primitive@1.1.1: + resolution: {integrity: sha512-TbX3mj8n0odCBFVlY8AxkqcHASw3L60jIuF8jFP78az3C2YhmGvqbHBpAjTRH2/xqYunrJ9g1jSyjCjpoWzIAA==} + engines: {node: '>= 0.4'} + + which-builtin-type@1.2.1: + resolution: {integrity: sha512-6iBczoX+kDQ7a3+YJBnh3T+KZRxM/iYNPXicqk66/Qfm1b93iu+yOImkg0zHbj5LNOcNv1TEADiZ0xa34B4q6Q==} + engines: {node: '>= 0.4'} + + which-collection@1.0.2: + resolution: {integrity: sha512-K4jVyjnBdgvc86Y6BkaLZEN933SwYOuBFkdmBu9ZfkcAbdVbpITnDmjvZ/aQjRXQrv5EPkTnD1s39GiiqbngCw==} + engines: {node: '>= 0.4'} + + which-typed-array@1.1.20: + resolution: {integrity: sha512-LYfpUkmqwl0h9A2HL09Mms427Q1RZWuOHsukfVcKRq9q95iQxdw0ix1JQrqbcDR9PH1QDwf5Qo8OZb5lksZ8Xg==} + engines: {node: '>= 0.4'} + + which@2.0.2: + resolution: {integrity: sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==} + engines: {node: '>= 8'} + hasBin: true + + word-wrap@1.2.5: + resolution: {integrity: sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA==} + engines: {node: '>=0.10.0'} + + wrap-ansi@7.0.0: + resolution: {integrity: sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==} + engines: {node: '>=10'} + + wrappy@1.0.2: + resolution: {integrity: sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==} + + y18n@5.0.8: + resolution: {integrity: sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==} + engines: {node: '>=10'} + + yaml@2.8.2: + resolution: {integrity: sha512-mplynKqc1C2hTVYxd0PU2xQAc22TI1vShAYGksCCfxbn/dFwnHTNi1bvYsBTkhdUNtGIf5xNOg938rrSSYvS9A==} + engines: {node: '>= 14.6'} + hasBin: true + + yargs-parser@20.2.9: + resolution: {integrity: sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w==} + engines: 
{node: '>=10'} + + yargs-parser@21.1.1: + resolution: {integrity: sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==} + engines: {node: '>=12'} + + yargs@17.7.2: + resolution: {integrity: sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==} + engines: {node: '>=12'} + + yocto-queue@0.1.0: + resolution: {integrity: sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==} + engines: {node: '>=10'} + + zlibjs@0.3.1: + resolution: {integrity: sha512-+J9RrgTKOmlxFSDHo0pI1xM6BLVUv+o0ZT9ANtCxGkjIVCCUdx9alUF8Gm+dGLKbkkkidWIHFDZHDMpfITt4+w==} + + zod-to-json-schema@3.25.1: + resolution: {integrity: sha512-pM/SU9d3YAggzi6MtR4h7ruuQlqKtad8e9S0fmxcMi+ueAK5Korys/aWcV9LIIHTVbj01NdzxcnXSN+O74ZIVA==} + peerDependencies: + zod: ^3.25 || ^4 + + zod@3.25.76: + resolution: {integrity: sha512-gzUt/qt81nXsFGKIFcC3YnfEAx5NkunCfnDlvuBSSFS02bcXu4Lmea0AFIUwbLWxWPx3d9p8S5QoaujKcNQxcQ==} + +snapshots: + + '@ai-sdk/openai@1.3.24(zod@3.25.76)': + dependencies: + '@ai-sdk/provider': 1.1.3 + '@ai-sdk/provider-utils': 2.2.8(zod@3.25.76) + zod: 3.25.76 + + '@ai-sdk/provider-utils@2.2.8(zod@3.25.76)': + dependencies: + '@ai-sdk/provider': 1.1.3 + nanoid: 3.3.11 + secure-json-parse: 2.7.0 + zod: 3.25.76 + + '@ai-sdk/provider@1.1.3': + dependencies: + json-schema: 0.4.0 + + '@ai-sdk/react@1.2.12(react@19.2.4)(zod@3.25.76)': + dependencies: + '@ai-sdk/provider-utils': 2.2.8(zod@3.25.76) + '@ai-sdk/ui-utils': 1.2.11(zod@3.25.76) + react: 19.2.4 + swr: 2.4.1(react@19.2.4) + throttleit: 2.1.0 + optionalDependencies: + zod: 3.25.76 + + '@ai-sdk/ui-utils@1.2.11(zod@3.25.76)': + dependencies: + '@ai-sdk/provider': 1.1.3 + '@ai-sdk/provider-utils': 2.2.8(zod@3.25.76) + zod: 3.25.76 + zod-to-json-schema: 3.25.1(zod@3.25.76) + + '@anthropic-ai/sdk@0.32.1': + dependencies: + '@types/node': 18.19.130 + '@types/node-fetch': 2.6.13 + abort-controller: 3.0.0 + agentkeepalive: 4.6.0 + 
form-data-encoder: 1.7.2 + formdata-node: 4.4.1 + node-fetch: 2.7.0 + transitivePeerDependencies: + - encoding + + '@aws-crypto/sha256-browser@5.2.0': + dependencies: + '@aws-crypto/sha256-js': 5.2.0 + '@aws-crypto/supports-web-crypto': 5.2.0 + '@aws-crypto/util': 5.2.0 + '@aws-sdk/types': 3.973.4 + '@aws-sdk/util-locate-window': 3.965.4 + '@smithy/util-utf8': 2.3.0 + tslib: 2.8.1 + + '@aws-crypto/sha256-js@5.2.0': + dependencies: + '@aws-crypto/util': 5.2.0 + '@aws-sdk/types': 3.973.4 + tslib: 2.8.1 + + '@aws-crypto/supports-web-crypto@5.2.0': + dependencies: + tslib: 2.8.1 + + '@aws-crypto/util@5.2.0': + dependencies: + '@aws-sdk/types': 3.973.4 + '@smithy/util-utf8': 2.3.0 + tslib: 2.8.1 + + '@aws-sdk/client-cognito-identity@3.1000.0': + dependencies: + '@aws-crypto/sha256-browser': 5.2.0 + '@aws-crypto/sha256-js': 5.2.0 + '@aws-sdk/core': 3.973.15 + '@aws-sdk/credential-provider-node': 3.972.14 + '@aws-sdk/middleware-host-header': 3.972.6 + '@aws-sdk/middleware-logger': 3.972.6 + '@aws-sdk/middleware-recursion-detection': 3.972.6 + '@aws-sdk/middleware-user-agent': 3.972.15 + '@aws-sdk/region-config-resolver': 3.972.6 + '@aws-sdk/types': 3.973.4 + '@aws-sdk/util-endpoints': 3.996.3 + '@aws-sdk/util-user-agent-browser': 3.972.6 + '@aws-sdk/util-user-agent-node': 3.973.0 + '@smithy/config-resolver': 4.4.9 + '@smithy/core': 3.23.6 + '@smithy/fetch-http-handler': 5.3.11 + '@smithy/hash-node': 4.2.10 + '@smithy/invalid-dependency': 4.2.10 + '@smithy/middleware-content-length': 4.2.10 + '@smithy/middleware-endpoint': 4.4.20 + '@smithy/middleware-retry': 4.4.37 + '@smithy/middleware-serde': 4.2.11 + '@smithy/middleware-stack': 4.2.10 + '@smithy/node-config-provider': 4.3.10 + '@smithy/node-http-handler': 4.4.12 + '@smithy/protocol-http': 5.3.10 + '@smithy/smithy-client': 4.12.0 + '@smithy/types': 4.13.0 + '@smithy/url-parser': 4.2.10 + '@smithy/util-base64': 4.3.1 + '@smithy/util-body-length-browser': 4.2.1 + '@smithy/util-body-length-node': 4.2.2 + 
'@smithy/util-defaults-mode-browser': 4.3.36 + '@smithy/util-defaults-mode-node': 4.2.39 + '@smithy/util-endpoints': 3.3.1 + '@smithy/util-middleware': 4.2.10 + '@smithy/util-retry': 4.2.10 + '@smithy/util-utf8': 4.2.1 + tslib: 2.8.1 + transitivePeerDependencies: + - aws-crt + + '@aws-sdk/client-sagemaker@3.1000.0': + dependencies: + '@aws-crypto/sha256-browser': 5.2.0 + '@aws-crypto/sha256-js': 5.2.0 + '@aws-sdk/core': 3.973.15 + '@aws-sdk/credential-provider-node': 3.972.14 + '@aws-sdk/middleware-host-header': 3.972.6 + '@aws-sdk/middleware-logger': 3.972.6 + '@aws-sdk/middleware-recursion-detection': 3.972.6 + '@aws-sdk/middleware-user-agent': 3.972.15 + '@aws-sdk/region-config-resolver': 3.972.6 + '@aws-sdk/types': 3.973.4 + '@aws-sdk/util-endpoints': 3.996.3 + '@aws-sdk/util-user-agent-browser': 3.972.6 + '@aws-sdk/util-user-agent-node': 3.973.0 + '@smithy/config-resolver': 4.4.9 + '@smithy/core': 3.23.6 + '@smithy/fetch-http-handler': 5.3.11 + '@smithy/hash-node': 4.2.10 + '@smithy/invalid-dependency': 4.2.10 + '@smithy/middleware-content-length': 4.2.10 + '@smithy/middleware-endpoint': 4.4.20 + '@smithy/middleware-retry': 4.4.37 + '@smithy/middleware-serde': 4.2.11 + '@smithy/middleware-stack': 4.2.10 + '@smithy/node-config-provider': 4.3.10 + '@smithy/node-http-handler': 4.4.12 + '@smithy/protocol-http': 5.3.10 + '@smithy/smithy-client': 4.12.0 + '@smithy/types': 4.13.0 + '@smithy/url-parser': 4.2.10 + '@smithy/util-base64': 4.3.1 + '@smithy/util-body-length-browser': 4.2.1 + '@smithy/util-body-length-node': 4.2.2 + '@smithy/util-defaults-mode-browser': 4.3.36 + '@smithy/util-defaults-mode-node': 4.2.39 + '@smithy/util-endpoints': 3.3.1 + '@smithy/util-middleware': 4.2.10 + '@smithy/util-retry': 4.2.10 + '@smithy/util-utf8': 4.2.1 + '@smithy/util-waiter': 4.2.10 + tslib: 2.8.1 + transitivePeerDependencies: + - aws-crt + + '@aws-sdk/core@3.973.15': + dependencies: + '@aws-sdk/types': 3.973.4 + '@aws-sdk/xml-builder': 3.972.8 + '@smithy/core': 3.23.6 + 
'@smithy/node-config-provider': 4.3.10 + '@smithy/property-provider': 4.2.10 + '@smithy/protocol-http': 5.3.10 + '@smithy/signature-v4': 5.3.10 + '@smithy/smithy-client': 4.12.0 + '@smithy/types': 4.13.0 + '@smithy/util-base64': 4.3.1 + '@smithy/util-middleware': 4.2.10 + '@smithy/util-utf8': 4.2.1 + tslib: 2.8.1 + + '@aws-sdk/credential-provider-cognito-identity@3.972.6': + dependencies: + '@aws-sdk/nested-clients': 3.996.3 + '@aws-sdk/types': 3.973.4 + '@smithy/property-provider': 4.2.10 + '@smithy/types': 4.13.0 + tslib: 2.8.1 + transitivePeerDependencies: + - aws-crt + + '@aws-sdk/credential-provider-env@3.972.13': + dependencies: + '@aws-sdk/core': 3.973.15 + '@aws-sdk/types': 3.973.4 + '@smithy/property-provider': 4.2.10 + '@smithy/types': 4.13.0 + tslib: 2.8.1 + + '@aws-sdk/credential-provider-http@3.972.15': + dependencies: + '@aws-sdk/core': 3.973.15 + '@aws-sdk/types': 3.973.4 + '@smithy/fetch-http-handler': 5.3.11 + '@smithy/node-http-handler': 4.4.12 + '@smithy/property-provider': 4.2.10 + '@smithy/protocol-http': 5.3.10 + '@smithy/smithy-client': 4.12.0 + '@smithy/types': 4.13.0 + '@smithy/util-stream': 4.5.15 + tslib: 2.8.1 + + '@aws-sdk/credential-provider-ini@3.972.13': + dependencies: + '@aws-sdk/core': 3.973.15 + '@aws-sdk/credential-provider-env': 3.972.13 + '@aws-sdk/credential-provider-http': 3.972.15 + '@aws-sdk/credential-provider-login': 3.972.13 + '@aws-sdk/credential-provider-process': 3.972.13 + '@aws-sdk/credential-provider-sso': 3.972.13 + '@aws-sdk/credential-provider-web-identity': 3.972.13 + '@aws-sdk/nested-clients': 3.996.3 + '@aws-sdk/types': 3.973.4 + '@smithy/credential-provider-imds': 4.2.10 + '@smithy/property-provider': 4.2.10 + '@smithy/shared-ini-file-loader': 4.4.5 + '@smithy/types': 4.13.0 + tslib: 2.8.1 + transitivePeerDependencies: + - aws-crt + + '@aws-sdk/credential-provider-login@3.972.13': + dependencies: + '@aws-sdk/core': 3.973.15 + '@aws-sdk/nested-clients': 3.996.3 + '@aws-sdk/types': 3.973.4 + 
'@smithy/property-provider': 4.2.10 + '@smithy/protocol-http': 5.3.10 + '@smithy/shared-ini-file-loader': 4.4.5 + '@smithy/types': 4.13.0 + tslib: 2.8.1 + transitivePeerDependencies: + - aws-crt + + '@aws-sdk/credential-provider-node@3.972.14': + dependencies: + '@aws-sdk/credential-provider-env': 3.972.13 + '@aws-sdk/credential-provider-http': 3.972.15 + '@aws-sdk/credential-provider-ini': 3.972.13 + '@aws-sdk/credential-provider-process': 3.972.13 + '@aws-sdk/credential-provider-sso': 3.972.13 + '@aws-sdk/credential-provider-web-identity': 3.972.13 + '@aws-sdk/types': 3.973.4 + '@smithy/credential-provider-imds': 4.2.10 + '@smithy/property-provider': 4.2.10 + '@smithy/shared-ini-file-loader': 4.4.5 + '@smithy/types': 4.13.0 + tslib: 2.8.1 + transitivePeerDependencies: + - aws-crt + + '@aws-sdk/credential-provider-process@3.972.13': + dependencies: + '@aws-sdk/core': 3.973.15 + '@aws-sdk/types': 3.973.4 + '@smithy/property-provider': 4.2.10 + '@smithy/shared-ini-file-loader': 4.4.5 + '@smithy/types': 4.13.0 + tslib: 2.8.1 + + '@aws-sdk/credential-provider-sso@3.972.13': + dependencies: + '@aws-sdk/core': 3.973.15 + '@aws-sdk/nested-clients': 3.996.3 + '@aws-sdk/token-providers': 3.999.0 + '@aws-sdk/types': 3.973.4 + '@smithy/property-provider': 4.2.10 + '@smithy/shared-ini-file-loader': 4.4.5 + '@smithy/types': 4.13.0 + tslib: 2.8.1 + transitivePeerDependencies: + - aws-crt + + '@aws-sdk/credential-provider-web-identity@3.972.13': + dependencies: + '@aws-sdk/core': 3.973.15 + '@aws-sdk/nested-clients': 3.996.3 + '@aws-sdk/types': 3.973.4 + '@smithy/property-provider': 4.2.10 + '@smithy/shared-ini-file-loader': 4.4.5 + '@smithy/types': 4.13.0 + tslib: 2.8.1 + transitivePeerDependencies: + - aws-crt + + '@aws-sdk/credential-providers@3.1000.0': + dependencies: + '@aws-sdk/client-cognito-identity': 3.1000.0 + '@aws-sdk/core': 3.973.15 + '@aws-sdk/credential-provider-cognito-identity': 3.972.6 + '@aws-sdk/credential-provider-env': 3.972.13 + 
'@aws-sdk/credential-provider-http': 3.972.15 + '@aws-sdk/credential-provider-ini': 3.972.13 + '@aws-sdk/credential-provider-login': 3.972.13 + '@aws-sdk/credential-provider-node': 3.972.14 + '@aws-sdk/credential-provider-process': 3.972.13 + '@aws-sdk/credential-provider-sso': 3.972.13 + '@aws-sdk/credential-provider-web-identity': 3.972.13 + '@aws-sdk/nested-clients': 3.996.3 + '@aws-sdk/types': 3.973.4 + '@smithy/config-resolver': 4.4.9 + '@smithy/core': 3.23.6 + '@smithy/credential-provider-imds': 4.2.10 + '@smithy/node-config-provider': 4.3.10 + '@smithy/property-provider': 4.2.10 + '@smithy/types': 4.13.0 + tslib: 2.8.1 + transitivePeerDependencies: + - aws-crt + + '@aws-sdk/middleware-host-header@3.972.6': + dependencies: + '@aws-sdk/types': 3.973.4 + '@smithy/protocol-http': 5.3.10 + '@smithy/types': 4.13.0 + tslib: 2.8.1 + + '@aws-sdk/middleware-logger@3.972.6': + dependencies: + '@aws-sdk/types': 3.973.4 + '@smithy/types': 4.13.0 + tslib: 2.8.1 + + '@aws-sdk/middleware-recursion-detection@3.972.6': + dependencies: + '@aws-sdk/types': 3.973.4 + '@aws/lambda-invoke-store': 0.2.3 + '@smithy/protocol-http': 5.3.10 + '@smithy/types': 4.13.0 + tslib: 2.8.1 + + '@aws-sdk/middleware-user-agent@3.972.15': + dependencies: + '@aws-sdk/core': 3.973.15 + '@aws-sdk/types': 3.973.4 + '@aws-sdk/util-endpoints': 3.996.3 + '@smithy/core': 3.23.6 + '@smithy/protocol-http': 5.3.10 + '@smithy/types': 4.13.0 + tslib: 2.8.1 + + '@aws-sdk/nested-clients@3.996.3': + dependencies: + '@aws-crypto/sha256-browser': 5.2.0 + '@aws-crypto/sha256-js': 5.2.0 + '@aws-sdk/core': 3.973.15 + '@aws-sdk/middleware-host-header': 3.972.6 + '@aws-sdk/middleware-logger': 3.972.6 + '@aws-sdk/middleware-recursion-detection': 3.972.6 + '@aws-sdk/middleware-user-agent': 3.972.15 + '@aws-sdk/region-config-resolver': 3.972.6 + '@aws-sdk/types': 3.973.4 + '@aws-sdk/util-endpoints': 3.996.3 + '@aws-sdk/util-user-agent-browser': 3.972.6 + '@aws-sdk/util-user-agent-node': 3.973.0 + '@smithy/config-resolver': 
4.4.9 + '@smithy/core': 3.23.6 + '@smithy/fetch-http-handler': 5.3.11 + '@smithy/hash-node': 4.2.10 + '@smithy/invalid-dependency': 4.2.10 + '@smithy/middleware-content-length': 4.2.10 + '@smithy/middleware-endpoint': 4.4.20 + '@smithy/middleware-retry': 4.4.37 + '@smithy/middleware-serde': 4.2.11 + '@smithy/middleware-stack': 4.2.10 + '@smithy/node-config-provider': 4.3.10 + '@smithy/node-http-handler': 4.4.12 + '@smithy/protocol-http': 5.3.10 + '@smithy/smithy-client': 4.12.0 + '@smithy/types': 4.13.0 + '@smithy/url-parser': 4.2.10 + '@smithy/util-base64': 4.3.1 + '@smithy/util-body-length-browser': 4.2.1 + '@smithy/util-body-length-node': 4.2.2 + '@smithy/util-defaults-mode-browser': 4.3.36 + '@smithy/util-defaults-mode-node': 4.2.39 + '@smithy/util-endpoints': 3.3.1 + '@smithy/util-middleware': 4.2.10 + '@smithy/util-retry': 4.2.10 + '@smithy/util-utf8': 4.2.1 + tslib: 2.8.1 + transitivePeerDependencies: + - aws-crt + + '@aws-sdk/region-config-resolver@3.972.6': + dependencies: + '@aws-sdk/types': 3.973.4 + '@smithy/config-resolver': 4.4.9 + '@smithy/node-config-provider': 4.3.10 + '@smithy/types': 4.13.0 + tslib: 2.8.1 + + '@aws-sdk/token-providers@3.999.0': + dependencies: + '@aws-sdk/core': 3.973.15 + '@aws-sdk/nested-clients': 3.996.3 + '@aws-sdk/types': 3.973.4 + '@smithy/property-provider': 4.2.10 + '@smithy/shared-ini-file-loader': 4.4.5 + '@smithy/types': 4.13.0 + tslib: 2.8.1 + transitivePeerDependencies: + - aws-crt + + '@aws-sdk/types@3.973.4': + dependencies: + '@smithy/types': 4.13.0 + tslib: 2.8.1 + + '@aws-sdk/util-endpoints@3.996.3': + dependencies: + '@aws-sdk/types': 3.973.4 + '@smithy/types': 4.13.0 + '@smithy/url-parser': 4.2.10 + '@smithy/util-endpoints': 3.3.1 + tslib: 2.8.1 + + '@aws-sdk/util-locate-window@3.965.4': + dependencies: + tslib: 2.8.1 + + '@aws-sdk/util-user-agent-browser@3.972.6': + dependencies: + '@aws-sdk/types': 3.973.4 + '@smithy/types': 4.13.0 + bowser: 2.14.1 + tslib: 2.8.1 + + '@aws-sdk/util-user-agent-node@3.973.0': 
+ dependencies: + '@aws-sdk/middleware-user-agent': 3.972.15 + '@aws-sdk/types': 3.973.4 + '@smithy/node-config-provider': 4.3.10 + '@smithy/types': 4.13.0 + tslib: 2.8.1 + + '@aws-sdk/xml-builder@3.972.8': + dependencies: + '@smithy/types': 4.13.0 + fast-xml-parser: 5.3.6 + tslib: 2.8.1 + + '@aws/lambda-invoke-store@0.2.3': {} + + '@browserbasehq/sdk@2.7.0': + dependencies: + '@types/node': 18.19.130 + '@types/node-fetch': 2.6.13 + abort-controller: 3.0.0 + agentkeepalive: 4.6.0 + form-data-encoder: 1.7.2 + formdata-node: 4.4.1 + node-fetch: 2.7.0 + transitivePeerDependencies: + - encoding + + '@bufbuild/protobuf@2.11.0': {} + + '@eslint-community/eslint-utils@4.9.1(eslint@8.57.1)': + dependencies: + eslint: 8.57.1 + eslint-visitor-keys: 3.4.3 + + '@eslint-community/regexpp@4.12.2': {} + + '@eslint/eslintrc@2.1.4': + dependencies: + ajv: 6.14.0 + debug: 4.4.3 + espree: 9.6.1 + globals: 13.24.0 + ignore: 5.3.2 + import-fresh: 3.3.1 + js-yaml: 4.1.1 + minimatch: 3.1.5 + strip-json-comments: 3.1.1 + transitivePeerDependencies: + - supports-color + + '@eslint/js@8.57.1': {} + + '@google-cloud/promisify@4.1.0': {} + + '@google-cloud/vision@4.3.3': + dependencies: + '@google-cloud/promisify': 4.1.0 + google-gax: 4.6.1 + is: 3.3.2 + transitivePeerDependencies: + - encoding + - supports-color + + '@grpc/grpc-js@1.14.3': + dependencies: + '@grpc/proto-loader': 0.8.0 + '@js-sdsl/ordered-map': 4.4.2 + + '@grpc/proto-loader@0.7.15': + dependencies: + lodash.camelcase: 4.3.0 + long: 5.3.2 + protobufjs: 7.5.4 + yargs: 17.7.2 + + '@grpc/proto-loader@0.8.0': + dependencies: + lodash.camelcase: 4.3.0 + long: 5.3.2 + protobufjs: 7.5.4 + yargs: 17.7.2 + + '@hatchet-dev/typescript-sdk@file:../../typescript': + dependencies: + '@bufbuild/protobuf': 2.11.0 + '@types/qs': 6.14.0 + abort-controller-x: 0.4.3 + axios: 1.13.6 + long: 5.3.2 + nice-grpc: 2.1.14 + nice-grpc-common: 2.0.2 + protobufjs: 7.5.4 + qs: 6.15.0 + semver: 7.7.4 + yaml: 2.8.2 + zod: 3.25.76 + zod-to-json-schema: 
3.25.1(zod@3.25.76) + optionalDependencies: + prom-client: 15.1.3 + transitivePeerDependencies: + - debug + + '@humanwhocodes/config-array@0.13.0': + dependencies: + '@humanwhocodes/object-schema': 2.0.3 + debug: 4.4.3 + minimatch: 3.1.5 + transitivePeerDependencies: + - supports-color + + '@humanwhocodes/module-importer@1.0.1': {} + + '@humanwhocodes/object-schema@2.0.3': {} + + '@js-sdsl/ordered-map@4.4.2': {} + + '@mendable/firecrawl-js@4.15.0': + dependencies: + axios: 1.13.6 + typescript-event-target: 1.1.2 + zod: 3.25.76 + zod-to-json-schema: 3.25.1(zod@3.25.76) + transitivePeerDependencies: + - debug + + '@nodelib/fs.scandir@2.1.5': + dependencies: + '@nodelib/fs.stat': 2.0.5 + run-parallel: 1.2.0 + + '@nodelib/fs.stat@2.0.5': {} + + '@nodelib/fs.walk@1.2.8': + dependencies: + '@nodelib/fs.scandir': 2.1.5 + fastq: 1.20.1 + + '@opentelemetry/api@1.9.0': {} + + '@protobufjs/aspromise@1.1.2': {} + + '@protobufjs/base64@1.1.2': {} + + '@protobufjs/codegen@2.0.4': {} + + '@protobufjs/eventemitter@1.1.0': {} + + '@protobufjs/fetch@1.1.0': + dependencies: + '@protobufjs/aspromise': 1.1.2 + '@protobufjs/inquire': 1.1.0 + + '@protobufjs/float@1.0.2': {} + + '@protobufjs/inquire@1.1.0': {} + + '@protobufjs/path@1.1.2': {} + + '@protobufjs/pool@1.1.0': {} + + '@protobufjs/utf8@1.1.0': {} + + '@rtsao/scc@1.1.0': {} + + '@smithy/abort-controller@4.2.10': + dependencies: + '@smithy/types': 4.13.0 + tslib: 2.8.1 + + '@smithy/config-resolver@4.4.9': + dependencies: + '@smithy/node-config-provider': 4.3.10 + '@smithy/types': 4.13.0 + '@smithy/util-config-provider': 4.2.1 + '@smithy/util-endpoints': 3.3.1 + '@smithy/util-middleware': 4.2.10 + tslib: 2.8.1 + + '@smithy/core@3.23.6': + dependencies: + '@smithy/middleware-serde': 4.2.11 + '@smithy/protocol-http': 5.3.10 + '@smithy/types': 4.13.0 + '@smithy/util-base64': 4.3.1 + '@smithy/util-body-length-browser': 4.2.1 + '@smithy/util-middleware': 4.2.10 + '@smithy/util-stream': 4.5.15 + '@smithy/util-utf8': 4.2.1 + 
'@smithy/uuid': 1.1.1 + tslib: 2.8.1 + + '@smithy/credential-provider-imds@4.2.10': + dependencies: + '@smithy/node-config-provider': 4.3.10 + '@smithy/property-provider': 4.2.10 + '@smithy/types': 4.13.0 + '@smithy/url-parser': 4.2.10 + tslib: 2.8.1 + + '@smithy/fetch-http-handler@5.3.11': + dependencies: + '@smithy/protocol-http': 5.3.10 + '@smithy/querystring-builder': 4.2.10 + '@smithy/types': 4.13.0 + '@smithy/util-base64': 4.3.1 + tslib: 2.8.1 + + '@smithy/hash-node@4.2.10': + dependencies: + '@smithy/types': 4.13.0 + '@smithy/util-buffer-from': 4.2.1 + '@smithy/util-utf8': 4.2.1 + tslib: 2.8.1 + + '@smithy/invalid-dependency@4.2.10': + dependencies: + '@smithy/types': 4.13.0 + tslib: 2.8.1 + + '@smithy/is-array-buffer@2.2.0': + dependencies: + tslib: 2.8.1 + + '@smithy/is-array-buffer@4.2.1': + dependencies: + tslib: 2.8.1 + + '@smithy/middleware-content-length@4.2.10': + dependencies: + '@smithy/protocol-http': 5.3.10 + '@smithy/types': 4.13.0 + tslib: 2.8.1 + + '@smithy/middleware-endpoint@4.4.20': + dependencies: + '@smithy/core': 3.23.6 + '@smithy/middleware-serde': 4.2.11 + '@smithy/node-config-provider': 4.3.10 + '@smithy/shared-ini-file-loader': 4.4.5 + '@smithy/types': 4.13.0 + '@smithy/url-parser': 4.2.10 + '@smithy/util-middleware': 4.2.10 + tslib: 2.8.1 + + '@smithy/middleware-retry@4.4.37': + dependencies: + '@smithy/node-config-provider': 4.3.10 + '@smithy/protocol-http': 5.3.10 + '@smithy/service-error-classification': 4.2.10 + '@smithy/smithy-client': 4.12.0 + '@smithy/types': 4.13.0 + '@smithy/util-middleware': 4.2.10 + '@smithy/util-retry': 4.2.10 + '@smithy/uuid': 1.1.1 + tslib: 2.8.1 + + '@smithy/middleware-serde@4.2.11': + dependencies: + '@smithy/protocol-http': 5.3.10 + '@smithy/types': 4.13.0 + tslib: 2.8.1 + + '@smithy/middleware-stack@4.2.10': + dependencies: + '@smithy/types': 4.13.0 + tslib: 2.8.1 + + '@smithy/node-config-provider@4.3.10': + dependencies: + '@smithy/property-provider': 4.2.10 + '@smithy/shared-ini-file-loader': 
4.4.5 + '@smithy/types': 4.13.0 + tslib: 2.8.1 + + '@smithy/node-http-handler@4.4.12': + dependencies: + '@smithy/abort-controller': 4.2.10 + '@smithy/protocol-http': 5.3.10 + '@smithy/querystring-builder': 4.2.10 + '@smithy/types': 4.13.0 + tslib: 2.8.1 + + '@smithy/property-provider@4.2.10': + dependencies: + '@smithy/types': 4.13.0 + tslib: 2.8.1 + + '@smithy/protocol-http@5.3.10': + dependencies: + '@smithy/types': 4.13.0 + tslib: 2.8.1 + + '@smithy/querystring-builder@4.2.10': + dependencies: + '@smithy/types': 4.13.0 + '@smithy/util-uri-escape': 4.2.1 + tslib: 2.8.1 + + '@smithy/querystring-parser@4.2.10': + dependencies: + '@smithy/types': 4.13.0 + tslib: 2.8.1 + + '@smithy/service-error-classification@4.2.10': + dependencies: + '@smithy/types': 4.13.0 + + '@smithy/shared-ini-file-loader@4.4.5': + dependencies: + '@smithy/types': 4.13.0 + tslib: 2.8.1 + + '@smithy/signature-v4@5.3.10': + dependencies: + '@smithy/is-array-buffer': 4.2.1 + '@smithy/protocol-http': 5.3.10 + '@smithy/types': 4.13.0 + '@smithy/util-hex-encoding': 4.2.1 + '@smithy/util-middleware': 4.2.10 + '@smithy/util-uri-escape': 4.2.1 + '@smithy/util-utf8': 4.2.1 + tslib: 2.8.1 + + '@smithy/smithy-client@4.12.0': + dependencies: + '@smithy/core': 3.23.6 + '@smithy/middleware-endpoint': 4.4.20 + '@smithy/middleware-stack': 4.2.10 + '@smithy/protocol-http': 5.3.10 + '@smithy/types': 4.13.0 + '@smithy/util-stream': 4.5.15 + tslib: 2.8.1 + + '@smithy/types@4.13.0': + dependencies: + tslib: 2.8.1 + + '@smithy/url-parser@4.2.10': + dependencies: + '@smithy/querystring-parser': 4.2.10 + '@smithy/types': 4.13.0 + tslib: 2.8.1 + + '@smithy/util-base64@4.3.1': + dependencies: + '@smithy/util-buffer-from': 4.2.1 + '@smithy/util-utf8': 4.2.1 + tslib: 2.8.1 + + '@smithy/util-body-length-browser@4.2.1': + dependencies: + tslib: 2.8.1 + + '@smithy/util-body-length-node@4.2.2': + dependencies: + tslib: 2.8.1 + + '@smithy/util-buffer-from@2.2.0': + dependencies: + '@smithy/is-array-buffer': 2.2.0 + tslib: 
2.8.1 + + '@smithy/util-buffer-from@4.2.1': + dependencies: + '@smithy/is-array-buffer': 4.2.1 + tslib: 2.8.1 + + '@smithy/util-config-provider@4.2.1': + dependencies: + tslib: 2.8.1 + + '@smithy/util-defaults-mode-browser@4.3.36': + dependencies: + '@smithy/property-provider': 4.2.10 + '@smithy/smithy-client': 4.12.0 + '@smithy/types': 4.13.0 + tslib: 2.8.1 + + '@smithy/util-defaults-mode-node@4.2.39': + dependencies: + '@smithy/config-resolver': 4.4.9 + '@smithy/credential-provider-imds': 4.2.10 + '@smithy/node-config-provider': 4.3.10 + '@smithy/property-provider': 4.2.10 + '@smithy/smithy-client': 4.12.0 + '@smithy/types': 4.13.0 + tslib: 2.8.1 + + '@smithy/util-endpoints@3.3.1': + dependencies: + '@smithy/node-config-provider': 4.3.10 + '@smithy/types': 4.13.0 + tslib: 2.8.1 + + '@smithy/util-hex-encoding@4.2.1': + dependencies: + tslib: 2.8.1 + + '@smithy/util-middleware@4.2.10': + dependencies: + '@smithy/types': 4.13.0 + tslib: 2.8.1 + + '@smithy/util-retry@4.2.10': + dependencies: + '@smithy/service-error-classification': 4.2.10 + '@smithy/types': 4.13.0 + tslib: 2.8.1 + + '@smithy/util-stream@4.5.15': + dependencies: + '@smithy/fetch-http-handler': 5.3.11 + '@smithy/node-http-handler': 4.4.12 + '@smithy/types': 4.13.0 + '@smithy/util-base64': 4.3.1 + '@smithy/util-buffer-from': 4.2.1 + '@smithy/util-hex-encoding': 4.2.1 + '@smithy/util-utf8': 4.2.1 + tslib: 2.8.1 + + '@smithy/util-uri-escape@4.2.1': + dependencies: + tslib: 2.8.1 + + '@smithy/util-utf8@2.3.0': + dependencies: + '@smithy/util-buffer-from': 2.2.0 + tslib: 2.8.1 + + '@smithy/util-utf8@4.2.1': + dependencies: + '@smithy/util-buffer-from': 4.2.1 + tslib: 2.8.1 + + '@smithy/util-waiter@4.2.10': + dependencies: + '@smithy/abort-controller': 4.2.10 + '@smithy/types': 4.13.0 + tslib: 2.8.1 + + '@smithy/uuid@1.1.1': + dependencies: + tslib: 2.8.1 + + '@tootallnate/once@2.0.0': {} + + '@types/caseless@0.12.5': {} + + '@types/diff-match-patch@1.0.36': {} + + '@types/json-schema@7.0.15': {} + + 
'@types/json5@0.0.29': {} + + '@types/long@4.0.2': {} + + '@types/node-fetch@2.6.13': + dependencies: + '@types/node': 22.19.13 + form-data: 4.0.5 + + '@types/node@18.19.130': + dependencies: + undici-types: 5.26.5 + + '@types/node@22.19.13': + dependencies: + undici-types: 6.21.0 + + '@types/qs@6.14.0': {} + + '@types/request@2.48.13': + dependencies: + '@types/caseless': 0.12.5 + '@types/node': 22.19.13 + '@types/tough-cookie': 4.0.5 + form-data: 2.5.5 + + '@types/semver@7.7.1': {} + + '@types/tough-cookie@4.0.5': {} + + '@typescript-eslint/eslint-plugin@6.21.0(@typescript-eslint/parser@6.21.0(eslint@8.57.1)(typescript@5.9.3))(eslint@8.57.1)(typescript@5.9.3)': + dependencies: + '@eslint-community/regexpp': 4.12.2 + '@typescript-eslint/parser': 6.21.0(eslint@8.57.1)(typescript@5.9.3) + '@typescript-eslint/scope-manager': 6.21.0 + '@typescript-eslint/type-utils': 6.21.0(eslint@8.57.1)(typescript@5.9.3) + '@typescript-eslint/utils': 6.21.0(eslint@8.57.1)(typescript@5.9.3) + '@typescript-eslint/visitor-keys': 6.21.0 + debug: 4.4.3 + eslint: 8.57.1 + graphemer: 1.4.0 + ignore: 5.3.2 + natural-compare: 1.4.0 + semver: 7.7.4 + ts-api-utils: 1.4.3(typescript@5.9.3) + optionalDependencies: + typescript: 5.9.3 + transitivePeerDependencies: + - supports-color + + '@typescript-eslint/parser@6.21.0(eslint@8.57.1)(typescript@5.9.3)': + dependencies: + '@typescript-eslint/scope-manager': 6.21.0 + '@typescript-eslint/types': 6.21.0 + '@typescript-eslint/typescript-estree': 6.21.0(typescript@5.9.3) + '@typescript-eslint/visitor-keys': 6.21.0 + debug: 4.4.3 + eslint: 8.57.1 + optionalDependencies: + typescript: 5.9.3 + transitivePeerDependencies: + - supports-color + + '@typescript-eslint/scope-manager@6.21.0': + dependencies: + '@typescript-eslint/types': 6.21.0 + '@typescript-eslint/visitor-keys': 6.21.0 + + '@typescript-eslint/type-utils@6.21.0(eslint@8.57.1)(typescript@5.9.3)': + dependencies: + '@typescript-eslint/typescript-estree': 6.21.0(typescript@5.9.3) + 
'@typescript-eslint/utils': 6.21.0(eslint@8.57.1)(typescript@5.9.3) + debug: 4.4.3 + eslint: 8.57.1 + ts-api-utils: 1.4.3(typescript@5.9.3) + optionalDependencies: + typescript: 5.9.3 + transitivePeerDependencies: + - supports-color + + '@typescript-eslint/types@6.21.0': {} + + '@typescript-eslint/typescript-estree@6.21.0(typescript@5.9.3)': + dependencies: + '@typescript-eslint/types': 6.21.0 + '@typescript-eslint/visitor-keys': 6.21.0 + debug: 4.4.3 + globby: 11.1.0 + is-glob: 4.0.3 + minimatch: 9.0.3 + semver: 7.7.4 + ts-api-utils: 1.4.3(typescript@5.9.3) + optionalDependencies: + typescript: 5.9.3 + transitivePeerDependencies: + - supports-color + + '@typescript-eslint/utils@6.21.0(eslint@8.57.1)(typescript@5.9.3)': + dependencies: + '@eslint-community/eslint-utils': 4.9.1(eslint@8.57.1) + '@types/json-schema': 7.0.15 + '@types/semver': 7.7.1 + '@typescript-eslint/scope-manager': 6.21.0 + '@typescript-eslint/types': 6.21.0 + '@typescript-eslint/typescript-estree': 6.21.0(typescript@5.9.3) + eslint: 8.57.1 + semver: 7.7.4 + transitivePeerDependencies: + - supports-color + - typescript + + '@typescript-eslint/visitor-keys@6.21.0': + dependencies: + '@typescript-eslint/types': 6.21.0 + eslint-visitor-keys: 3.4.3 + + '@ungap/structured-clone@1.3.0': {} + + abort-controller-x@0.4.3: {} + + abort-controller@3.0.0: + dependencies: + event-target-shim: 5.0.1 + + acorn-jsx@5.3.2(acorn@8.16.0): + dependencies: + acorn: 8.16.0 + + acorn@8.16.0: {} + + agent-base@6.0.2: + dependencies: + debug: 4.4.3 + transitivePeerDependencies: + - supports-color + + agent-base@7.1.4: {} + + agentkeepalive@4.6.0: + dependencies: + humanize-ms: 1.2.1 + + ai@4.3.19(react@19.2.4)(zod@3.25.76): + dependencies: + '@ai-sdk/provider': 1.1.3 + '@ai-sdk/provider-utils': 2.2.8(zod@3.25.76) + '@ai-sdk/react': 1.2.12(react@19.2.4)(zod@3.25.76) + '@ai-sdk/ui-utils': 1.2.11(zod@3.25.76) + '@opentelemetry/api': 1.9.0 + jsondiffpatch: 0.6.0 + zod: 3.25.76 + optionalDependencies: + react: 19.2.4 + + 
ajv@6.14.0: + dependencies: + fast-deep-equal: 3.1.3 + fast-json-stable-stringify: 2.1.0 + json-schema-traverse: 0.4.1 + uri-js: 4.4.1 + + ansi-regex@5.0.1: {} + + ansi-styles@4.3.0: + dependencies: + color-convert: 2.0.1 + + argparse@2.0.1: {} + + array-buffer-byte-length@1.0.2: + dependencies: + call-bound: 1.0.4 + is-array-buffer: 3.0.5 + + array-includes@3.1.9: + dependencies: + call-bind: 1.0.8 + call-bound: 1.0.4 + define-properties: 1.2.1 + es-abstract: 1.24.1 + es-object-atoms: 1.1.1 + get-intrinsic: 1.3.0 + is-string: 1.1.1 + math-intrinsics: 1.1.0 + + array-union@2.1.0: {} + + array.prototype.findlastindex@1.2.6: + dependencies: + call-bind: 1.0.8 + call-bound: 1.0.4 + define-properties: 1.2.1 + es-abstract: 1.24.1 + es-errors: 1.3.0 + es-object-atoms: 1.1.1 + es-shim-unscopables: 1.1.0 + + array.prototype.flat@1.3.3: + dependencies: + call-bind: 1.0.8 + define-properties: 1.2.1 + es-abstract: 1.24.1 + es-shim-unscopables: 1.1.0 + + array.prototype.flatmap@1.3.3: + dependencies: + call-bind: 1.0.8 + define-properties: 1.2.1 + es-abstract: 1.24.1 + es-shim-unscopables: 1.1.0 + + arraybuffer.prototype.slice@1.0.4: + dependencies: + array-buffer-byte-length: 1.0.2 + call-bind: 1.0.8 + define-properties: 1.2.1 + es-abstract: 1.24.1 + es-errors: 1.3.0 + get-intrinsic: 1.3.0 + is-array-buffer: 3.0.5 + + async-function@1.0.0: {} + + asynckit@0.4.0: {} + + available-typed-arrays@1.0.7: + dependencies: + possible-typed-array-names: 1.1.0 + + axios@1.13.6: + dependencies: + follow-redirects: 1.15.11 + form-data: 4.0.5 + proxy-from-env: 1.1.0 + transitivePeerDependencies: + - debug + + balanced-match@1.0.2: {} + + base64-js@1.5.1: {} + + bignumber.js@9.3.1: {} + + bintrees@1.0.2: + optional: true + + bmp-js@0.1.0: {} + + bowser@2.14.1: {} + + brace-expansion@1.1.12: + dependencies: + balanced-match: 1.0.2 + concat-map: 0.0.1 + + brace-expansion@2.0.2: + dependencies: + balanced-match: 1.0.2 + + braces@3.0.3: + dependencies: + fill-range: 7.1.1 + + 
buffer-equal-constant-time@1.0.1: {} + + buffer@6.0.3: + dependencies: + base64-js: 1.5.1 + ieee754: 1.2.1 + + call-bind-apply-helpers@1.0.2: + dependencies: + es-errors: 1.3.0 + function-bind: 1.1.2 + + call-bind@1.0.8: + dependencies: + call-bind-apply-helpers: 1.0.2 + es-define-property: 1.0.1 + get-intrinsic: 1.3.0 + set-function-length: 1.2.2 + + call-bound@1.0.4: + dependencies: + call-bind-apply-helpers: 1.0.2 + get-intrinsic: 1.3.0 + + callsites@3.1.0: {} + + chalk@4.1.2: + dependencies: + ansi-styles: 4.3.0 + supports-color: 7.2.0 + + chalk@5.6.2: {} + + cliui@8.0.1: + dependencies: + string-width: 4.2.3 + strip-ansi: 6.0.1 + wrap-ansi: 7.0.0 + + cohere-ai@7.20.0: + dependencies: + '@aws-crypto/sha256-js': 5.2.0 + '@aws-sdk/client-sagemaker': 3.1000.0 + '@aws-sdk/credential-providers': 3.1000.0 + '@smithy/protocol-http': 5.3.10 + '@smithy/signature-v4': 5.3.10 + convict: 6.2.4 + form-data: 4.0.5 + form-data-encoder: 4.1.0 + formdata-node: 6.0.3 + readable-stream: 4.7.0 + transitivePeerDependencies: + - aws-crt + + color-convert@2.0.1: + dependencies: + color-name: 1.1.4 + + color-name@1.1.4: {} + + combined-stream@1.0.8: + dependencies: + delayed-stream: 1.0.0 + + concat-map@0.0.1: {} + + confusing-browser-globals@1.0.11: {} + + convict@6.2.4: + dependencies: + lodash.clonedeep: 4.5.0 + yargs-parser: 20.2.9 + + cross-spawn@7.0.6: + dependencies: + path-key: 3.1.1 + shebang-command: 2.0.0 + which: 2.0.2 + + data-view-buffer@1.0.2: + dependencies: + call-bound: 1.0.4 + es-errors: 1.3.0 + is-data-view: 1.0.2 + + data-view-byte-length@1.0.2: + dependencies: + call-bound: 1.0.4 + es-errors: 1.3.0 + is-data-view: 1.0.2 + + data-view-byte-offset@1.0.1: + dependencies: + call-bound: 1.0.4 + es-errors: 1.3.0 + is-data-view: 1.0.2 + + debug@3.2.7: + dependencies: + ms: 2.1.3 + + debug@4.4.3: + dependencies: + ms: 2.1.3 + + deep-is@0.1.4: {} + + define-data-property@1.1.4: + dependencies: + es-define-property: 1.0.1 + es-errors: 1.3.0 + gopd: 1.2.0 + + 
define-properties@1.2.1: + dependencies: + define-data-property: 1.1.4 + has-property-descriptors: 1.0.2 + object-keys: 1.1.1 + + delayed-stream@1.0.0: {} + + dequal@2.0.3: {} + + diff-match-patch@1.0.5: {} + + dir-glob@3.0.1: + dependencies: + path-type: 4.0.0 + + doctrine@2.1.0: + dependencies: + esutils: 2.0.3 + + doctrine@3.0.0: + dependencies: + esutils: 2.0.3 + + dunder-proto@1.0.1: + dependencies: + call-bind-apply-helpers: 1.0.2 + es-errors: 1.3.0 + gopd: 1.2.0 + + duplexify@4.1.3: + dependencies: + end-of-stream: 1.4.5 + inherits: 2.0.4 + readable-stream: 3.6.2 + stream-shift: 1.0.3 + + ecdsa-sig-formatter@1.0.11: + dependencies: + safe-buffer: 5.2.1 + + emoji-regex@8.0.0: {} + + end-of-stream@1.4.5: + dependencies: + once: 1.4.0 + + es-abstract@1.24.1: + dependencies: + array-buffer-byte-length: 1.0.2 + arraybuffer.prototype.slice: 1.0.4 + available-typed-arrays: 1.0.7 + call-bind: 1.0.8 + call-bound: 1.0.4 + data-view-buffer: 1.0.2 + data-view-byte-length: 1.0.2 + data-view-byte-offset: 1.0.1 + es-define-property: 1.0.1 + es-errors: 1.3.0 + es-object-atoms: 1.1.1 + es-set-tostringtag: 2.1.0 + es-to-primitive: 1.3.0 + function.prototype.name: 1.1.8 + get-intrinsic: 1.3.0 + get-proto: 1.0.1 + get-symbol-description: 1.1.0 + globalthis: 1.0.4 + gopd: 1.2.0 + has-property-descriptors: 1.0.2 + has-proto: 1.2.0 + has-symbols: 1.1.0 + hasown: 2.0.2 + internal-slot: 1.1.0 + is-array-buffer: 3.0.5 + is-callable: 1.2.7 + is-data-view: 1.0.2 + is-negative-zero: 2.0.3 + is-regex: 1.2.1 + is-set: 2.0.3 + is-shared-array-buffer: 1.0.4 + is-string: 1.1.1 + is-typed-array: 1.1.15 + is-weakref: 1.1.1 + math-intrinsics: 1.1.0 + object-inspect: 1.13.4 + object-keys: 1.1.1 + object.assign: 4.1.7 + own-keys: 1.0.1 + regexp.prototype.flags: 1.5.4 + safe-array-concat: 1.1.3 + safe-push-apply: 1.0.0 + safe-regex-test: 1.1.0 + set-proto: 1.0.0 + stop-iteration-iterator: 1.1.0 + string.prototype.trim: 1.2.10 + string.prototype.trimend: 1.0.9 + string.prototype.trimstart: 1.0.8 + 
typed-array-buffer: 1.0.3 + typed-array-byte-length: 1.0.3 + typed-array-byte-offset: 1.0.4 + typed-array-length: 1.0.7 + unbox-primitive: 1.1.0 + which-typed-array: 1.1.20 + + es-define-property@1.0.1: {} + + es-errors@1.3.0: {} + + es-object-atoms@1.1.1: + dependencies: + es-errors: 1.3.0 + + es-set-tostringtag@2.1.0: + dependencies: + es-errors: 1.3.0 + get-intrinsic: 1.3.0 + has-tostringtag: 1.0.2 + hasown: 2.0.2 + + es-shim-unscopables@1.1.0: + dependencies: + hasown: 2.0.2 + + es-to-primitive@1.3.0: + dependencies: + is-callable: 1.2.7 + is-date-object: 1.1.0 + is-symbol: 1.1.1 + + escalade@3.2.0: {} + + escape-string-regexp@4.0.0: {} + + eslint-config-airbnb-base@15.0.0(eslint-plugin-import@2.32.0(@typescript-eslint/parser@6.21.0(eslint@8.57.1)(typescript@5.9.3))(eslint@8.57.1))(eslint@8.57.1): + dependencies: + confusing-browser-globals: 1.0.11 + eslint: 8.57.1 + eslint-plugin-import: 2.32.0(@typescript-eslint/parser@6.21.0(eslint@8.57.1)(typescript@5.9.3))(eslint@8.57.1) + object.assign: 4.1.7 + object.entries: 1.1.9 + semver: 6.3.1 + + eslint-config-prettier@9.1.2(eslint@8.57.1): + dependencies: + eslint: 8.57.1 + + eslint-import-resolver-node@0.3.9: + dependencies: + debug: 3.2.7 + is-core-module: 2.16.1 + resolve: 1.22.11 + transitivePeerDependencies: + - supports-color + + eslint-module-utils@2.12.1(@typescript-eslint/parser@6.21.0(eslint@8.57.1)(typescript@5.9.3))(eslint-import-resolver-node@0.3.9)(eslint@8.57.1): + dependencies: + debug: 3.2.7 + optionalDependencies: + '@typescript-eslint/parser': 6.21.0(eslint@8.57.1)(typescript@5.9.3) + eslint: 8.57.1 + eslint-import-resolver-node: 0.3.9 + transitivePeerDependencies: + - supports-color + + eslint-plugin-import@2.32.0(@typescript-eslint/parser@6.21.0(eslint@8.57.1)(typescript@5.9.3))(eslint@8.57.1): + dependencies: + '@rtsao/scc': 1.1.0 + array-includes: 3.1.9 + array.prototype.findlastindex: 1.2.6 + array.prototype.flat: 1.3.3 + array.prototype.flatmap: 1.3.3 + debug: 3.2.7 + doctrine: 2.1.0 + 
eslint: 8.57.1 + eslint-import-resolver-node: 0.3.9 + eslint-module-utils: 2.12.1(@typescript-eslint/parser@6.21.0(eslint@8.57.1)(typescript@5.9.3))(eslint-import-resolver-node@0.3.9)(eslint@8.57.1) + hasown: 2.0.2 + is-core-module: 2.16.1 + is-glob: 4.0.3 + minimatch: 3.1.5 + object.fromentries: 2.0.8 + object.groupby: 1.0.3 + object.values: 1.2.1 + semver: 6.3.1 + string.prototype.trimend: 1.0.9 + tsconfig-paths: 3.15.0 + optionalDependencies: + '@typescript-eslint/parser': 6.21.0(eslint@8.57.1)(typescript@5.9.3) + transitivePeerDependencies: + - eslint-import-resolver-typescript + - eslint-import-resolver-webpack + - supports-color + + eslint-scope@7.2.2: + dependencies: + esrecurse: 4.3.0 + estraverse: 5.3.0 + + eslint-visitor-keys@3.4.3: {} + + eslint@8.57.1: + dependencies: + '@eslint-community/eslint-utils': 4.9.1(eslint@8.57.1) + '@eslint-community/regexpp': 4.12.2 + '@eslint/eslintrc': 2.1.4 + '@eslint/js': 8.57.1 + '@humanwhocodes/config-array': 0.13.0 + '@humanwhocodes/module-importer': 1.0.1 + '@nodelib/fs.walk': 1.2.8 + '@ungap/structured-clone': 1.3.0 + ajv: 6.14.0 + chalk: 4.1.2 + cross-spawn: 7.0.6 + debug: 4.4.3 + doctrine: 3.0.0 + escape-string-regexp: 4.0.0 + eslint-scope: 7.2.2 + eslint-visitor-keys: 3.4.3 + espree: 9.6.1 + esquery: 1.7.0 + esutils: 2.0.3 + fast-deep-equal: 3.1.3 + file-entry-cache: 6.0.1 + find-up: 5.0.0 + glob-parent: 6.0.2 + globals: 13.24.0 + graphemer: 1.4.0 + ignore: 5.3.2 + imurmurhash: 0.1.4 + is-glob: 4.0.3 + is-path-inside: 3.0.3 + js-yaml: 4.1.1 + json-stable-stringify-without-jsonify: 1.0.1 + levn: 0.4.1 + lodash.merge: 4.6.2 + minimatch: 3.1.5 + natural-compare: 1.4.0 + optionator: 0.9.4 + strip-ansi: 6.0.1 + text-table: 0.2.0 + transitivePeerDependencies: + - supports-color + + espree@9.6.1: + dependencies: + acorn: 8.16.0 + acorn-jsx: 5.3.2(acorn@8.16.0) + eslint-visitor-keys: 3.4.3 + + esquery@1.7.0: + dependencies: + estraverse: 5.3.0 + + esrecurse@4.3.0: + dependencies: + estraverse: 5.3.0 + + estraverse@5.3.0: 
{} + + esutils@2.0.3: {} + + event-target-shim@5.0.1: {} + + events@3.3.0: {} + + extend@3.0.2: {} + + fast-deep-equal@3.1.3: {} + + fast-glob@3.3.3: + dependencies: + '@nodelib/fs.stat': 2.0.5 + '@nodelib/fs.walk': 1.2.8 + glob-parent: 5.1.2 + merge2: 1.4.1 + micromatch: 4.0.8 + + fast-json-stable-stringify@2.1.0: {} + + fast-levenshtein@2.0.6: {} + + fast-xml-parser@5.3.6: + dependencies: + strnum: 2.2.0 + + fastq@1.20.1: + dependencies: + reusify: 1.1.0 + + file-entry-cache@6.0.1: + dependencies: + flat-cache: 3.2.0 + + fill-range@7.1.1: + dependencies: + to-regex-range: 5.0.1 + + find-up@5.0.0: + dependencies: + locate-path: 6.0.0 + path-exists: 4.0.0 + + flat-cache@3.2.0: + dependencies: + flatted: 3.3.3 + keyv: 4.5.4 + rimraf: 3.0.2 + + flatted@3.3.3: {} + + follow-redirects@1.15.11: {} + + for-each@0.3.5: + dependencies: + is-callable: 1.2.7 + + form-data-encoder@1.7.2: {} + + form-data-encoder@4.1.0: {} + + form-data@2.5.5: + dependencies: + asynckit: 0.4.0 + combined-stream: 1.0.8 + es-set-tostringtag: 2.1.0 + hasown: 2.0.2 + mime-types: 2.1.35 + safe-buffer: 5.2.1 + + form-data@4.0.5: + dependencies: + asynckit: 0.4.0 + combined-stream: 1.0.8 + es-set-tostringtag: 2.1.0 + hasown: 2.0.2 + mime-types: 2.1.35 + + formdata-node@4.4.1: + dependencies: + node-domexception: 1.0.0 + web-streams-polyfill: 4.0.0-beta.3 + + formdata-node@6.0.3: {} + + fs.realpath@1.0.0: {} + + fsevents@2.3.2: + optional: true + + function-bind@1.1.2: {} + + function.prototype.name@1.1.8: + dependencies: + call-bind: 1.0.8 + call-bound: 1.0.4 + define-properties: 1.2.1 + functions-have-names: 1.2.3 + hasown: 2.0.2 + is-callable: 1.2.7 + + functions-have-names@1.2.3: {} + + gaxios@6.7.1: + dependencies: + extend: 3.0.2 + https-proxy-agent: 7.0.6 + is-stream: 2.0.1 + node-fetch: 2.7.0 + uuid: 9.0.1 + transitivePeerDependencies: + - encoding + - supports-color + + gcp-metadata@6.1.1: + dependencies: + gaxios: 6.7.1 + google-logging-utils: 0.0.2 + json-bigint: 1.0.0 + 
transitivePeerDependencies: + - encoding + - supports-color + + generator-function@2.0.1: {} + + get-caller-file@2.0.5: {} + + get-intrinsic@1.3.0: + dependencies: + call-bind-apply-helpers: 1.0.2 + es-define-property: 1.0.1 + es-errors: 1.3.0 + es-object-atoms: 1.1.1 + function-bind: 1.1.2 + get-proto: 1.0.1 + gopd: 1.2.0 + has-symbols: 1.1.0 + hasown: 2.0.2 + math-intrinsics: 1.1.0 + + get-proto@1.0.1: + dependencies: + dunder-proto: 1.0.1 + es-object-atoms: 1.1.1 + + get-symbol-description@1.1.0: + dependencies: + call-bound: 1.0.4 + es-errors: 1.3.0 + get-intrinsic: 1.3.0 + + glob-parent@5.1.2: + dependencies: + is-glob: 4.0.3 + + glob-parent@6.0.2: + dependencies: + is-glob: 4.0.3 + + glob@7.2.3: + dependencies: + fs.realpath: 1.0.0 + inflight: 1.0.6 + inherits: 2.0.4 + minimatch: 3.1.5 + once: 1.4.0 + path-is-absolute: 1.0.1 + + globals@13.24.0: + dependencies: + type-fest: 0.20.2 + + globalthis@1.0.4: + dependencies: + define-properties: 1.2.1 + gopd: 1.2.0 + + globby@11.1.0: + dependencies: + array-union: 2.1.0 + dir-glob: 3.0.1 + fast-glob: 3.3.3 + ignore: 5.3.2 + merge2: 1.4.1 + slash: 3.0.0 + + google-auth-library@9.15.1: + dependencies: + base64-js: 1.5.1 + ecdsa-sig-formatter: 1.0.11 + gaxios: 6.7.1 + gcp-metadata: 6.1.1 + gtoken: 7.1.0 + jws: 4.0.1 + transitivePeerDependencies: + - encoding + - supports-color + + google-gax@4.6.1: + dependencies: + '@grpc/grpc-js': 1.14.3 + '@grpc/proto-loader': 0.7.15 + '@types/long': 4.0.2 + abort-controller: 3.0.0 + duplexify: 4.1.3 + google-auth-library: 9.15.1 + node-fetch: 2.7.0 + object-hash: 3.0.0 + proto3-json-serializer: 2.0.2 + protobufjs: 7.5.4 + retry-request: 7.0.2 + uuid: 9.0.1 + transitivePeerDependencies: + - encoding + - supports-color + + google-logging-utils@0.0.2: {} + + gopd@1.2.0: {} + + graphemer@1.4.0: {} + + groq-sdk@0.5.0: + dependencies: + '@types/node': 18.19.130 + '@types/node-fetch': 2.6.13 + abort-controller: 3.0.0 + agentkeepalive: 4.6.0 + form-data-encoder: 1.7.2 + formdata-node: 
4.4.1 + node-fetch: 2.7.0 + web-streams-polyfill: 3.3.3 + transitivePeerDependencies: + - encoding + + gtoken@7.1.0: + dependencies: + gaxios: 6.7.1 + jws: 4.0.1 + transitivePeerDependencies: + - encoding + - supports-color + + has-bigints@1.1.0: {} + + has-flag@4.0.0: {} + + has-property-descriptors@1.0.2: + dependencies: + es-define-property: 1.0.1 + + has-proto@1.2.0: + dependencies: + dunder-proto: 1.0.1 + + has-symbols@1.1.0: {} + + has-tostringtag@1.0.2: + dependencies: + has-symbols: 1.1.0 + + hasown@2.0.2: + dependencies: + function-bind: 1.1.2 + + http-proxy-agent@5.0.0: + dependencies: + '@tootallnate/once': 2.0.0 + agent-base: 6.0.2 + debug: 4.4.3 + transitivePeerDependencies: + - supports-color + + https-proxy-agent@5.0.1: + dependencies: + agent-base: 6.0.2 + debug: 4.4.3 + transitivePeerDependencies: + - supports-color + + https-proxy-agent@7.0.6: + dependencies: + agent-base: 7.1.4 + debug: 4.4.3 + transitivePeerDependencies: + - supports-color + + humanize-ms@1.2.1: + dependencies: + ms: 2.1.3 + + idb-keyval@6.2.2: {} + + ieee754@1.2.1: {} + + ignore@5.3.2: {} + + import-fresh@3.3.1: + dependencies: + parent-module: 1.0.1 + resolve-from: 4.0.0 + + imurmurhash@0.1.4: {} + + inflight@1.0.6: + dependencies: + once: 1.4.0 + wrappy: 1.0.2 + + inherits@2.0.4: {} + + internal-slot@1.1.0: + dependencies: + es-errors: 1.3.0 + hasown: 2.0.2 + side-channel: 1.1.0 + + is-array-buffer@3.0.5: + dependencies: + call-bind: 1.0.8 + call-bound: 1.0.4 + get-intrinsic: 1.3.0 + + is-async-function@2.1.1: + dependencies: + async-function: 1.0.0 + call-bound: 1.0.4 + get-proto: 1.0.1 + has-tostringtag: 1.0.2 + safe-regex-test: 1.1.0 + + is-bigint@1.1.0: + dependencies: + has-bigints: 1.1.0 + + is-boolean-object@1.2.2: + dependencies: + call-bound: 1.0.4 + has-tostringtag: 1.0.2 + + is-callable@1.2.7: {} + + is-core-module@2.16.1: + dependencies: + hasown: 2.0.2 + + is-data-view@1.0.2: + dependencies: + call-bound: 1.0.4 + get-intrinsic: 1.3.0 + is-typed-array: 1.1.15 + + 
is-date-object@1.1.0: + dependencies: + call-bound: 1.0.4 + has-tostringtag: 1.0.2 + + is-electron@2.2.2: {} + + is-extglob@2.1.1: {} + + is-finalizationregistry@1.1.1: + dependencies: + call-bound: 1.0.4 + + is-fullwidth-code-point@3.0.0: {} + + is-generator-function@1.1.2: + dependencies: + call-bound: 1.0.4 + generator-function: 2.0.1 + get-proto: 1.0.1 + has-tostringtag: 1.0.2 + safe-regex-test: 1.1.0 + + is-glob@4.0.3: + dependencies: + is-extglob: 2.1.1 + + is-map@2.0.3: {} + + is-negative-zero@2.0.3: {} + + is-number-object@1.1.1: + dependencies: + call-bound: 1.0.4 + has-tostringtag: 1.0.2 + + is-number@7.0.0: {} + + is-path-inside@3.0.3: {} + + is-regex@1.2.1: + dependencies: + call-bound: 1.0.4 + gopd: 1.2.0 + has-tostringtag: 1.0.2 + hasown: 2.0.2 + + is-set@2.0.3: {} + + is-shared-array-buffer@1.0.4: + dependencies: + call-bound: 1.0.4 + + is-stream@2.0.1: {} + + is-string@1.1.1: + dependencies: + call-bound: 1.0.4 + has-tostringtag: 1.0.2 + + is-symbol@1.1.1: + dependencies: + call-bound: 1.0.4 + has-symbols: 1.1.0 + safe-regex-test: 1.1.0 + + is-typed-array@1.1.15: + dependencies: + which-typed-array: 1.1.20 + + is-url@1.2.4: {} + + is-weakmap@2.0.2: {} + + is-weakref@1.1.1: + dependencies: + call-bound: 1.0.4 + + is-weakset@2.0.4: + dependencies: + call-bound: 1.0.4 + get-intrinsic: 1.3.0 + + is@3.3.2: {} + + isarray@2.0.5: {} + + isexe@2.0.0: {} + + js-yaml@4.1.1: + dependencies: + argparse: 2.0.1 + + json-bigint@1.0.0: + dependencies: + bignumber.js: 9.3.1 + + json-buffer@3.0.1: {} + + json-schema-traverse@0.4.1: {} + + json-schema@0.4.0: {} + + json-stable-stringify-without-jsonify@1.0.1: {} + + json5@1.0.2: + dependencies: + minimist: 1.2.8 + + jsondiffpatch@0.6.0: + dependencies: + '@types/diff-match-patch': 1.0.36 + chalk: 5.6.2 + diff-match-patch: 1.0.5 + + jwa@2.0.1: + dependencies: + buffer-equal-constant-time: 1.0.1 + ecdsa-sig-formatter: 1.0.11 + safe-buffer: 5.2.1 + + jws@4.0.1: + dependencies: + jwa: 2.0.1 + safe-buffer: 5.2.1 + + 
keyv@4.5.4: + dependencies: + json-buffer: 3.0.1 + + levn@0.4.1: + dependencies: + prelude-ls: 1.2.1 + type-check: 0.4.0 + + locate-path@6.0.0: + dependencies: + p-locate: 5.0.0 + + lodash.camelcase@4.3.0: {} + + lodash.clonedeep@4.5.0: {} + + lodash.merge@4.6.2: {} + + long@5.3.2: {} + + math-intrinsics@1.1.0: {} + + merge2@1.4.1: {} + + micromatch@4.0.8: + dependencies: + braces: 3.0.3 + picomatch: 2.3.1 + + mime-db@1.52.0: {} + + mime-types@2.1.35: + dependencies: + mime-db: 1.52.0 + + minimatch@3.1.5: + dependencies: + brace-expansion: 1.1.12 + + minimatch@9.0.3: + dependencies: + brace-expansion: 2.0.2 + + minimist@1.2.8: {} + + ms@2.1.3: {} + + nanoid@3.3.11: {} + + natural-compare@1.4.0: {} + + nice-grpc-common@2.0.2: + dependencies: + ts-error: 1.0.6 + + nice-grpc@2.1.14: + dependencies: + '@grpc/grpc-js': 1.14.3 + abort-controller-x: 0.4.3 + nice-grpc-common: 2.0.2 + + node-domexception@1.0.0: {} + + node-fetch@2.7.0: + dependencies: + whatwg-url: 5.0.0 + + object-hash@3.0.0: {} + + object-inspect@1.13.4: {} + + object-keys@1.1.1: {} + + object.assign@4.1.7: + dependencies: + call-bind: 1.0.8 + call-bound: 1.0.4 + define-properties: 1.2.1 + es-object-atoms: 1.1.1 + has-symbols: 1.1.0 + object-keys: 1.1.1 + + object.entries@1.1.9: + dependencies: + call-bind: 1.0.8 + call-bound: 1.0.4 + define-properties: 1.2.1 + es-object-atoms: 1.1.1 + + object.fromentries@2.0.8: + dependencies: + call-bind: 1.0.8 + define-properties: 1.2.1 + es-abstract: 1.24.1 + es-object-atoms: 1.1.1 + + object.groupby@1.0.3: + dependencies: + call-bind: 1.0.8 + define-properties: 1.2.1 + es-abstract: 1.24.1 + + object.values@1.2.1: + dependencies: + call-bind: 1.0.8 + call-bound: 1.0.4 + define-properties: 1.2.1 + es-object-atoms: 1.1.1 + + once@1.4.0: + dependencies: + wrappy: 1.0.2 + + openai@4.104.0(zod@3.25.76): + dependencies: + '@types/node': 18.19.130 + '@types/node-fetch': 2.6.13 + abort-controller: 3.0.0 + agentkeepalive: 4.6.0 + form-data-encoder: 1.7.2 + formdata-node: 
4.4.1 + node-fetch: 2.7.0 + optionalDependencies: + zod: 3.25.76 + transitivePeerDependencies: + - encoding + + opencollective-postinstall@2.0.3: {} + + optionator@0.9.4: + dependencies: + deep-is: 0.1.4 + fast-levenshtein: 2.0.6 + levn: 0.4.1 + prelude-ls: 1.2.1 + type-check: 0.4.0 + word-wrap: 1.2.5 + + own-keys@1.0.1: + dependencies: + get-intrinsic: 1.3.0 + object-keys: 1.1.1 + safe-push-apply: 1.0.0 + + p-limit@3.1.0: + dependencies: + yocto-queue: 0.1.0 + + p-locate@5.0.0: + dependencies: + p-limit: 3.1.0 + + parent-module@1.0.1: + dependencies: + callsites: 3.1.0 + + path-exists@4.0.0: {} + + path-is-absolute@1.0.1: {} + + path-key@3.1.1: {} + + path-parse@1.0.7: {} + + path-type@4.0.0: {} + + picomatch@2.3.1: {} + + playwright-core@1.58.2: {} + + playwright@1.58.2: + dependencies: + playwright-core: 1.58.2 + optionalDependencies: + fsevents: 2.3.2 + + possible-typed-array-names@1.1.0: {} + + prelude-ls@1.2.1: {} + + process@0.11.10: {} + + prom-client@15.1.3: + dependencies: + '@opentelemetry/api': 1.9.0 + tdigest: 0.1.2 + optional: true + + proto3-json-serializer@2.0.2: + dependencies: + protobufjs: 7.5.4 + + protobufjs@7.5.4: + dependencies: + '@protobufjs/aspromise': 1.1.2 + '@protobufjs/base64': 1.1.2 + '@protobufjs/codegen': 2.0.4 + '@protobufjs/eventemitter': 1.1.0 + '@protobufjs/fetch': 1.1.0 + '@protobufjs/float': 1.0.2 + '@protobufjs/inquire': 1.1.0 + '@protobufjs/path': 1.1.2 + '@protobufjs/pool': 1.1.0 + '@protobufjs/utf8': 1.1.0 + '@types/node': 22.19.13 + long: 5.3.2 + + proxy-from-env@1.1.0: {} + + punycode@2.3.1: {} + + qs@6.15.0: + dependencies: + side-channel: 1.1.0 + + queue-microtask@1.2.3: {} + + react@19.2.4: {} + + readable-stream@3.6.2: + dependencies: + inherits: 2.0.4 + string_decoder: 1.3.0 + util-deprecate: 1.0.2 + + readable-stream@4.7.0: + dependencies: + abort-controller: 3.0.0 + buffer: 6.0.3 + events: 3.3.0 + process: 0.11.10 + string_decoder: 1.3.0 + + reflect.getprototypeof@1.0.10: + dependencies: + call-bind: 1.0.8 + 
define-properties: 1.2.1 + es-abstract: 1.24.1 + es-errors: 1.3.0 + es-object-atoms: 1.1.1 + get-intrinsic: 1.3.0 + get-proto: 1.0.1 + which-builtin-type: 1.2.1 + + regenerator-runtime@0.13.11: {} + + regexp.prototype.flags@1.5.4: + dependencies: + call-bind: 1.0.8 + define-properties: 1.2.1 + es-errors: 1.3.0 + get-proto: 1.0.1 + gopd: 1.2.0 + set-function-name: 2.0.2 + + require-directory@2.1.1: {} + + resolve-from@4.0.0: {} + + resolve@1.22.11: + dependencies: + is-core-module: 2.16.1 + path-parse: 1.0.7 + supports-preserve-symlinks-flag: 1.0.0 + + retry-request@7.0.2: + dependencies: + '@types/request': 2.48.13 + extend: 3.0.2 + teeny-request: 9.0.0 + transitivePeerDependencies: + - encoding + - supports-color + + reusify@1.1.0: {} + + rimraf@3.0.2: + dependencies: + glob: 7.2.3 + + run-parallel@1.2.0: + dependencies: + queue-microtask: 1.2.3 + + safe-array-concat@1.1.3: + dependencies: + call-bind: 1.0.8 + call-bound: 1.0.4 + get-intrinsic: 1.3.0 + has-symbols: 1.1.0 + isarray: 2.0.5 + + safe-buffer@5.2.1: {} + + safe-push-apply@1.0.0: + dependencies: + es-errors: 1.3.0 + isarray: 2.0.5 + + safe-regex-test@1.1.0: + dependencies: + call-bound: 1.0.4 + es-errors: 1.3.0 + is-regex: 1.2.1 + + secure-json-parse@2.7.0: {} + + semver@6.3.1: {} + + semver@7.7.4: {} + + set-function-length@1.2.2: + dependencies: + define-data-property: 1.1.4 + es-errors: 1.3.0 + function-bind: 1.1.2 + get-intrinsic: 1.3.0 + gopd: 1.2.0 + has-property-descriptors: 1.0.2 + + set-function-name@2.0.2: + dependencies: + define-data-property: 1.1.4 + es-errors: 1.3.0 + functions-have-names: 1.2.3 + has-property-descriptors: 1.0.2 + + set-proto@1.0.0: + dependencies: + dunder-proto: 1.0.1 + es-errors: 1.3.0 + es-object-atoms: 1.1.1 + + shebang-command@2.0.0: + dependencies: + shebang-regex: 3.0.0 + + shebang-regex@3.0.0: {} + + side-channel-list@1.0.0: + dependencies: + es-errors: 1.3.0 + object-inspect: 1.13.4 + + side-channel-map@1.0.1: + dependencies: + call-bound: 1.0.4 + es-errors: 1.3.0 
+ get-intrinsic: 1.3.0 + object-inspect: 1.13.4 + + side-channel-weakmap@1.0.2: + dependencies: + call-bound: 1.0.4 + es-errors: 1.3.0 + get-intrinsic: 1.3.0 + object-inspect: 1.13.4 + side-channel-map: 1.0.1 + + side-channel@1.1.0: + dependencies: + es-errors: 1.3.0 + object-inspect: 1.13.4 + side-channel-list: 1.0.0 + side-channel-map: 1.0.1 + side-channel-weakmap: 1.0.2 + + slash@3.0.0: {} + + stop-iteration-iterator@1.1.0: + dependencies: + es-errors: 1.3.0 + internal-slot: 1.1.0 + + stream-events@1.0.5: + dependencies: + stubs: 3.0.0 + + stream-shift@1.0.3: {} + + string-width@4.2.3: + dependencies: + emoji-regex: 8.0.0 + is-fullwidth-code-point: 3.0.0 + strip-ansi: 6.0.1 + + string.prototype.trim@1.2.10: + dependencies: + call-bind: 1.0.8 + call-bound: 1.0.4 + define-data-property: 1.1.4 + define-properties: 1.2.1 + es-abstract: 1.24.1 + es-object-atoms: 1.1.1 + has-property-descriptors: 1.0.2 + + string.prototype.trimend@1.0.9: + dependencies: + call-bind: 1.0.8 + call-bound: 1.0.4 + define-properties: 1.2.1 + es-object-atoms: 1.1.1 + + string.prototype.trimstart@1.0.8: + dependencies: + call-bind: 1.0.8 + define-properties: 1.2.1 + es-object-atoms: 1.1.1 + + string_decoder@1.3.0: + dependencies: + safe-buffer: 5.2.1 + + strip-ansi@6.0.1: + dependencies: + ansi-regex: 5.0.1 + + strip-bom@3.0.0: {} + + strip-json-comments@3.1.1: {} + + strnum@2.2.0: {} + + stubs@3.0.0: {} + + supports-color@7.2.0: + dependencies: + has-flag: 4.0.0 + + supports-preserve-symlinks-flag@1.0.0: {} + + swr@2.4.1(react@19.2.4): + dependencies: + dequal: 2.0.3 + react: 19.2.4 + use-sync-external-store: 1.6.0(react@19.2.4) + + tdigest@0.1.2: + dependencies: + bintrees: 1.0.2 + optional: true + + teeny-request@9.0.0: + dependencies: + http-proxy-agent: 5.0.0 + https-proxy-agent: 5.0.1 + node-fetch: 2.7.0 + stream-events: 1.0.5 + uuid: 9.0.1 + transitivePeerDependencies: + - encoding + - supports-color + + tesseract.js-core@5.1.1: {} + + tesseract.js@5.1.1: + dependencies: + bmp-js: 
0.1.0 + idb-keyval: 6.2.2 + is-electron: 2.2.2 + is-url: 1.2.4 + node-fetch: 2.7.0 + opencollective-postinstall: 2.0.3 + regenerator-runtime: 0.13.11 + tesseract.js-core: 5.1.1 + wasm-feature-detect: 1.8.0 + zlibjs: 0.3.1 + transitivePeerDependencies: + - encoding + + text-table@0.2.0: {} + + throttleit@2.1.0: {} + + to-regex-range@5.0.1: + dependencies: + is-number: 7.0.0 + + tr46@0.0.3: {} + + ts-api-utils@1.4.3(typescript@5.9.3): + dependencies: + typescript: 5.9.3 + + ts-error@1.0.6: {} + + tsconfig-paths@3.15.0: + dependencies: + '@types/json5': 0.0.29 + json5: 1.0.2 + minimist: 1.2.8 + strip-bom: 3.0.0 + + tslib@2.8.1: {} + + type-check@0.4.0: + dependencies: + prelude-ls: 1.2.1 + + type-fest@0.20.2: {} + + typed-array-buffer@1.0.3: + dependencies: + call-bound: 1.0.4 + es-errors: 1.3.0 + is-typed-array: 1.1.15 + + typed-array-byte-length@1.0.3: + dependencies: + call-bind: 1.0.8 + for-each: 0.3.5 + gopd: 1.2.0 + has-proto: 1.2.0 + is-typed-array: 1.1.15 + + typed-array-byte-offset@1.0.4: + dependencies: + available-typed-arrays: 1.0.7 + call-bind: 1.0.8 + for-each: 0.3.5 + gopd: 1.2.0 + has-proto: 1.2.0 + is-typed-array: 1.1.15 + reflect.getprototypeof: 1.0.10 + + typed-array-length@1.0.7: + dependencies: + call-bind: 1.0.8 + for-each: 0.3.5 + gopd: 1.2.0 + is-typed-array: 1.1.15 + possible-typed-array-names: 1.1.0 + reflect.getprototypeof: 1.0.10 + + typescript-event-target@1.1.2: {} + + typescript@5.9.3: {} + + unbox-primitive@1.1.0: + dependencies: + call-bound: 1.0.4 + has-bigints: 1.1.0 + has-symbols: 1.1.0 + which-boxed-primitive: 1.1.1 + + undici-types@5.26.5: {} + + undici-types@6.21.0: {} + + uri-js@4.4.1: + dependencies: + punycode: 2.3.1 + + use-sync-external-store@1.6.0(react@19.2.4): + dependencies: + react: 19.2.4 + + util-deprecate@1.0.2: {} + + uuid@9.0.1: {} + + wasm-feature-detect@1.8.0: {} + + web-streams-polyfill@3.3.3: {} + + web-streams-polyfill@4.0.0-beta.3: {} + + webidl-conversions@3.0.1: {} + + whatwg-url@5.0.0: + dependencies: + 
tr46: 0.0.3 + webidl-conversions: 3.0.1 + + which-boxed-primitive@1.1.1: + dependencies: + is-bigint: 1.1.0 + is-boolean-object: 1.2.2 + is-number-object: 1.1.1 + is-string: 1.1.1 + is-symbol: 1.1.1 + + which-builtin-type@1.2.1: + dependencies: + call-bound: 1.0.4 + function.prototype.name: 1.1.8 + has-tostringtag: 1.0.2 + is-async-function: 2.1.1 + is-date-object: 1.1.0 + is-finalizationregistry: 1.1.1 + is-generator-function: 1.1.2 + is-regex: 1.2.1 + is-weakref: 1.1.1 + isarray: 2.0.5 + which-boxed-primitive: 1.1.1 + which-collection: 1.0.2 + which-typed-array: 1.1.20 + + which-collection@1.0.2: + dependencies: + is-map: 2.0.3 + is-set: 2.0.3 + is-weakmap: 2.0.2 + is-weakset: 2.0.4 + + which-typed-array@1.1.20: + dependencies: + available-typed-arrays: 1.0.7 + call-bind: 1.0.8 + call-bound: 1.0.4 + for-each: 0.3.5 + get-proto: 1.0.1 + gopd: 1.2.0 + has-tostringtag: 1.0.2 + + which@2.0.2: + dependencies: + isexe: 2.0.0 + + word-wrap@1.2.5: {} + + wrap-ansi@7.0.0: + dependencies: + ansi-styles: 4.3.0 + string-width: 4.2.3 + strip-ansi: 6.0.1 + + wrappy@1.0.2: {} + + y18n@5.0.8: {} + + yaml@2.8.2: {} + + yargs-parser@20.2.9: {} + + yargs-parser@21.1.1: {} + + yargs@17.7.2: + dependencies: + cliui: 8.0.1 + escalade: 3.2.0 + get-caller-file: 2.0.5 + require-directory: 2.1.1 + string-width: 4.2.3 + y18n: 5.0.8 + yargs-parser: 21.1.1 + + yocto-queue@0.1.0: {} + + zlibjs@0.3.1: {} + + zod-to-json-schema@3.25.1(zod@3.25.76): + dependencies: + zod: 3.25.76 + + zod@3.25.76: {} diff --git a/sdks/guides/typescript/rag-and-indexing/mock-embedding.ts b/sdks/guides/typescript/rag-and-indexing/mock-embedding.ts new file mode 100644 index 0000000000..2fa488c4da --- /dev/null +++ b/sdks/guides/typescript/rag-and-indexing/mock-embedding.ts @@ -0,0 +1,5 @@ +/** Mock embedding - no external API dependencies */ + +export function embed(text: string): number[] { + return Array(64).fill(0.1); +} diff --git a/sdks/guides/typescript/rag-and-indexing/worker.ts 
b/sdks/guides/typescript/rag-and-indexing/worker.ts new file mode 100644 index 0000000000..770c8e693d --- /dev/null +++ b/sdks/guides/typescript/rag-and-indexing/worker.ts @@ -0,0 +1,15 @@ +import { hatchet } from '../../hatchet-client'; +import { ragWf, embedChunkTask, queryTask } from './workflow'; + +async function main() { + // > Step 06 Run Worker + const worker = await hatchet.worker('rag-worker', { + workflows: [ragWf, embedChunkTask, queryTask], + }); + await worker.start(); + // !! +} + +if (require.main === module) { + main(); +} diff --git a/sdks/guides/typescript/rag-and-indexing/workflow.ts b/sdks/guides/typescript/rag-and-indexing/workflow.ts new file mode 100644 index 0000000000..33f915f900 --- /dev/null +++ b/sdks/guides/typescript/rag-and-indexing/workflow.ts @@ -0,0 +1,63 @@ +import { hatchet } from '../../hatchet-client'; +import { embed } from './mock-embedding'; + +// > Step 01 Define Workflow +type DocInput = { doc_id: string; content: string }; + +const ragWf = hatchet.workflow({ name: 'RAGPipeline' }); +// !! + +// > Step 02 Define Ingest Task +const ingest = ragWf.task({ + name: 'ingest', + fn: async (input) => ({ doc_id: input.doc_id, content: input.content }), +}); + +// !! + +// > Step 03 Chunk Task +function chunkContent(content: string, chunkSize = 100): string[] { + const chunks: string[] = []; + for (let i = 0; i < content.length; i += chunkSize) { + chunks.push(content.slice(i, i + chunkSize)); + } + return chunks; +} +// !! 
+ +// > Step 04 Embed Task +const embedChunkTask = hatchet.task<{ chunk: string }>({ + name: 'embed-chunk', + fn: async (input) => ({ vector: embed(input.chunk) }), +}); + +const chunkAndEmbed = ragWf.durableTask({ + name: 'chunk-and-embed', + parents: [ingest], + fn: async (input, ctx) => { + const ingested = await ctx.parentOutput(ingest); + const chunks: string[] = []; + for (let i = 0; i < ingested.content.length; i += 100) { + chunks.push(ingested.content.slice(i, i + 100)); + } + const results = await Promise.all(chunks.map((chunk) => embedChunkTask.run({ chunk }))); + return { doc_id: ingested.doc_id, vectors: results.map((r) => r.vector) }; + }, +}); + +// !! + +// > Step 05 Query Task +type QueryInput = { query: string; top_k?: number }; + +const queryTask = hatchet.durableTask({ + name: 'rag-query', + fn: async (input) => { + const { vector } = await embedChunkTask.run({ chunk: input.query }); + // Replace with a real vector DB lookup in production + return { query: input.query, vector, results: [] }; + }, +}); +// !! + +export { ragWf, embedChunkTask, queryTask }; diff --git a/sdks/guides/typescript/routing/mock-classifier.ts b/sdks/guides/typescript/routing/mock-classifier.ts new file mode 100644 index 0000000000..0f73943b03 --- /dev/null +++ b/sdks/guides/typescript/routing/mock-classifier.ts @@ -0,0 +1,17 @@ +export function mockClassify(message: string): string { + const lower = message.toLowerCase(); + if (lower.includes('bug') || lower.includes('error') || lower.includes('help')) return 'support'; + if (lower.includes('price') || lower.includes('buy') || lower.includes('plan')) return 'sales'; + return 'other'; +} + +export function mockReply(message: string, role: string): string { + switch (role) { + case 'support': + return `[Support] I can help with that technical issue. Let me look into: ${message}`; + case 'sales': + return `[Sales] Great question about pricing! 
Here's what I can tell you about: ${message}`; + default: + return `[General] Thanks for reaching out. Regarding: ${message}`; + } +} diff --git a/sdks/guides/typescript/routing/worker.ts b/sdks/guides/typescript/routing/worker.ts new file mode 100644 index 0000000000..5022311b55 --- /dev/null +++ b/sdks/guides/typescript/routing/worker.ts @@ -0,0 +1,16 @@ +import { hatchet } from '../../hatchet-client'; +import { classifyTask, supportTask, salesTask, defaultTask, routerTask } from './workflow'; + +async function main() { + // > Step 04 Run Worker + const worker = await hatchet.worker('routing-worker', { + workflows: [classifyTask, supportTask, salesTask, defaultTask, routerTask], + slots: 5, + }); + await worker.start(); + // !! +} + +if (require.main === module) { + main(); +} diff --git a/sdks/guides/typescript/routing/workflow.ts b/sdks/guides/typescript/routing/workflow.ts new file mode 100644 index 0000000000..512e07450d --- /dev/null +++ b/sdks/guides/typescript/routing/workflow.ts @@ -0,0 +1,57 @@ +import { hatchet } from '../../hatchet-client'; +import { mockClassify, mockReply } from './mock-classifier'; + +type MessageInput = { message: string }; + +// > Step 01 Classify Task +const classifyTask = hatchet.durableTask({ + name: 'classify-message', + fn: async (input: MessageInput) => { + return { category: mockClassify(input.message) }; + }, +}); +// !! 
+ +// > Step 02 Specialist Tasks +const supportTask = hatchet.durableTask({ + name: 'handle-support', + fn: async (input: MessageInput) => { + return { response: mockReply(input.message, 'support'), category: 'support' }; + }, +}); + +const salesTask = hatchet.durableTask({ + name: 'handle-sales', + fn: async (input: MessageInput) => { + return { response: mockReply(input.message, 'sales'), category: 'sales' }; + }, +}); + +const defaultTask = hatchet.durableTask({ + name: 'handle-default', + fn: async (input: MessageInput) => { + return { response: mockReply(input.message, 'other'), category: 'other' }; + }, +}); +// !! + +// > Step 03 Router Task +const routerTask = hatchet.durableTask({ + name: 'message-router', + executionTimeout: '2m', + fn: async (input: MessageInput) => { + const { category } = await classifyTask.run(input); + + switch (category) { + case 'support': + return supportTask.run(input); + case 'sales': + return salesTask.run(input); + default: + return defaultTask.run(input); + } + }, +}); +// !! + +export { classifyTask, supportTask, salesTask, defaultTask, routerTask }; diff --git a/sdks/guides/typescript/scheduled-jobs/trigger.ts b/sdks/guides/typescript/scheduled-jobs/trigger.ts new file mode 100644 index 0000000000..f1f166af75 --- /dev/null +++ b/sdks/guides/typescript/scheduled-jobs/trigger.ts @@ -0,0 +1,7 @@ +import { hatchet } from '../../hatchet-client'; + +// > Step 02 Schedule One Time +// Schedule a one-time run at a specific time. +const runAt = new Date(Date.now() + 60 * 60 * 1000); +await hatchet.scheduled.create('ScheduledWorkflow', { triggerAt: runAt, input: {} }); +// !! 
diff --git a/sdks/guides/typescript/scheduled-jobs/worker.ts b/sdks/guides/typescript/scheduled-jobs/worker.ts new file mode 100644 index 0000000000..d541cf9f0b --- /dev/null +++ b/sdks/guides/typescript/scheduled-jobs/worker.ts @@ -0,0 +1,15 @@ +import { hatchet } from '../../hatchet-client'; +import { cronWf } from './workflow'; + +async function main() { + // > Step 03 Run Worker + const worker = await hatchet.worker('scheduled-worker', { + workflows: [cronWf], + }); + await worker.start(); + // !! +} + +if (require.main === module) { + main(); +} diff --git a/sdks/guides/typescript/scheduled-jobs/workflow.ts b/sdks/guides/typescript/scheduled-jobs/workflow.ts new file mode 100644 index 0000000000..85f1f95851 --- /dev/null +++ b/sdks/guides/typescript/scheduled-jobs/workflow.ts @@ -0,0 +1,15 @@ +import { hatchet } from '../../hatchet-client'; + +// > Step 01 Define Cron Task +const cronWf = hatchet.workflow({ + name: 'ScheduledWorkflow', + on: { cron: '0 * * * *' }, +}); + +cronWf.task({ + name: 'run-scheduled-job', + fn: async () => ({ status: 'completed', job: 'maintenance' }), +}); +// !! + +export { cronWf }; diff --git a/sdks/guides/typescript/streaming/client.ts b/sdks/guides/typescript/streaming/client.ts new file mode 100644 index 0000000000..47837b6a4c --- /dev/null +++ b/sdks/guides/typescript/streaming/client.ts @@ -0,0 +1,11 @@ +import { streamTask } from './workflow'; + +// > Step 03 Subscribe Client +// Client triggers the task and subscribes to the stream. +async function runAndSubscribe() { + const run = await streamTask.run({}); + for await (const chunk of run.stream()) { + console.log(chunk); + } +} +// !! 
diff --git a/sdks/guides/typescript/streaming/worker.ts b/sdks/guides/typescript/streaming/worker.ts new file mode 100644 index 0000000000..8b44cc8dba --- /dev/null +++ b/sdks/guides/typescript/streaming/worker.ts @@ -0,0 +1,15 @@ +import { hatchet } from '../../hatchet-client'; +import { streamTask } from './workflow'; + +async function main() { + // > Step 04 Run Worker + const worker = await hatchet.worker('streaming-worker', { + workflows: [streamTask], + }); + await worker.start(); + // !! +} + +if (require.main === module) { + main(); +} diff --git a/sdks/guides/typescript/streaming/workflow.ts b/sdks/guides/typescript/streaming/workflow.ts new file mode 100644 index 0000000000..2da26bcd19 --- /dev/null +++ b/sdks/guides/typescript/streaming/workflow.ts @@ -0,0 +1,29 @@ +import { ConcurrencyLimitStrategy } from '@hatchet/protoc/v1/workflows'; +import { hatchet } from '../../hatchet-client'; + +// > Step 01 Define Streaming Task +export const streamTask = hatchet.task({ + name: 'stream-example', + concurrency: { + expression: "'constant'", + maxRuns: 1, + limitStrategy: ConcurrencyLimitStrategy.CANCEL_IN_PROGRESS, + }, + fn: async (_, ctx) => { + for (let i = 0; i < 5; i++) { + ctx.putStream(`chunk-${i}`); + await new Promise((r) => setTimeout(r, 500)); + } + return { status: 'done' }; + }, +}); +// !! + +// > Step 02 Emit Chunks +async function emitChunks(ctx: { putStream: (chunk: string) => void }) { + for (let i = 0; i < 5; i++) { + ctx.putStream(`chunk-${i}`); + await new Promise((r) => setTimeout(r, 500)); + } +} +// !! 
diff --git a/sdks/guides/typescript/tsconfig.json b/sdks/guides/typescript/tsconfig.json new file mode 100644 index 0000000000..5e942cb23c --- /dev/null +++ b/sdks/guides/typescript/tsconfig.json @@ -0,0 +1,13 @@ +{ + "compilerOptions": { + "target": "ES2020", + "module": "commonjs", + "moduleResolution": "node", + "strict": true, + "esModuleInterop": true, + "skipLibCheck": true, + "resolveJsonModule": true + }, + "include": ["./**/*.ts", "../hatchet-client.ts"], + "exclude": ["node_modules"] +} diff --git a/sdks/guides/typescript/web-scraping/mock-scraper.ts b/sdks/guides/typescript/web-scraping/mock-scraper.ts new file mode 100644 index 0000000000..a75f27fd7e --- /dev/null +++ b/sdks/guides/typescript/web-scraping/mock-scraper.ts @@ -0,0 +1,22 @@ +export interface ScrapeResult { + url: string; + title: string; + content: string; + scrapedAt: string; +} + +export function mockScrape(url: string): ScrapeResult { + return { + url, + title: `Page: ${url}`, + content: `Mock scraped content from ${url}. 
In production, use Firecrawl, Browserbase, or Playwright here.`, + scrapedAt: new Date().toISOString(), + }; +} + +export function mockExtract(content: string): Record { + return { + summary: content.slice(0, 80), + wordCount: String(content.split(' ').length), + }; +} diff --git a/sdks/guides/typescript/web-scraping/worker.ts b/sdks/guides/typescript/web-scraping/worker.ts new file mode 100644 index 0000000000..ac18df4443 --- /dev/null +++ b/sdks/guides/typescript/web-scraping/worker.ts @@ -0,0 +1,23 @@ +import { RateLimitDuration } from '@hatchet/protoc/v1/workflows'; +import { hatchet } from '../../hatchet-client'; +import { scrapeTask, processTask, scrapeWorkflow, rateLimitedScrapeTask, SCRAPE_RATE_LIMIT_KEY } from './workflow'; + +async function main() { + // > Step 05 Run Worker + await hatchet.ratelimits.upsert({ + key: SCRAPE_RATE_LIMIT_KEY, + limit: 10, + duration: RateLimitDuration.MINUTE, + }); + + const worker = await hatchet.worker('web-scraping-worker', { + workflows: [scrapeTask, processTask, scrapeWorkflow, rateLimitedScrapeTask], + slots: 5, + }); + await worker.start(); + // !! +} + +if (require.main === module) { + main(); +} diff --git a/sdks/guides/typescript/web-scraping/workflow.ts b/sdks/guides/typescript/web-scraping/workflow.ts new file mode 100644 index 0000000000..66b58e08e3 --- /dev/null +++ b/sdks/guides/typescript/web-scraping/workflow.ts @@ -0,0 +1,74 @@ +import { hatchet } from '../../hatchet-client'; +import { mockScrape } from './mock-scraper'; + +type ScrapeInput = { url: string }; + +// > Step 01 Define Scrape Task +const scrapeTask = hatchet.task({ + name: 'scrape-url', + executionTimeout: '2m', + retries: 2, + fn: async (input: ScrapeInput) => { + return mockScrape(input.url); + }, +}); +// !! 
+ +// > Step 02 Process Content +const processTask = hatchet.task({ + name: 'process-content', + fn: async (input: { url: string; content: string }) => { + const links = [...input.content.matchAll(/https?:\/\/[^\s<>"']+/g)].map((m) => m[0]); + const summary = input.content.slice(0, 200).trim(); + const wordCount = input.content.split(/\s+/).filter(Boolean).length; + return { summary, wordCount, links }; + }, +}); +// !! + +// > Step 03 Cron Workflow +const scrapeWorkflow = hatchet.workflow({ + name: 'WebScrapeWorkflow', + on: { cron: '0 */6 * * *' }, +}); + +scrapeWorkflow.task({ + name: 'scheduled-scrape', + fn: async () => { + const urls = [ + 'https://example.com/pricing', + 'https://example.com/blog', + 'https://example.com/docs', + ]; + + const results = []; + for (const url of urls) { + const scraped = await scrapeTask.run({ url }); + const processed = await processTask.run({ url, content: scraped.content }); + results.push({ url, ...processed }); + } + return { refreshed: results.length, results }; + }, +}); +// !! + +// > Step 04 Rate Limited Scrape +const SCRAPE_RATE_LIMIT_KEY = 'scrape-rate-limit'; + +const rateLimitedScrapeTask = hatchet.task({ + name: 'rate-limited-scrape', + executionTimeout: '2m', + retries: 2, + rateLimits: [ + { + staticKey: SCRAPE_RATE_LIMIT_KEY, + units: 1, + }, + ], + fn: async (input: ScrapeInput) => { + return mockScrape(input.url); + }, +}); +// !! 
+ +export { scrapeTask, processTask, scrapeWorkflow, rateLimitedScrapeTask, SCRAPE_RATE_LIMIT_KEY }; diff --git a/sdks/guides/typescript/webhook-processing/worker.ts b/sdks/guides/typescript/webhook-processing/worker.ts new file mode 100644 index 0000000000..beb94e0b5c --- /dev/null +++ b/sdks/guides/typescript/webhook-processing/worker.ts @@ -0,0 +1,15 @@ +import { hatchet } from '../../hatchet-client'; +import { processWebhook } from './workflow'; + +async function main() { + // > Step 04 Run Worker + const worker = await hatchet.worker('webhook-worker', { + workflows: [processWebhook], + }); + await worker.start(); + // !! +} + +if (require.main === module) { + main(); +} diff --git a/sdks/guides/typescript/webhook-processing/workflow.ts b/sdks/guides/typescript/webhook-processing/workflow.ts new file mode 100644 index 0000000000..bc2b94c4df --- /dev/null +++ b/sdks/guides/typescript/webhook-processing/workflow.ts @@ -0,0 +1,32 @@ +import { hatchet } from '../../hatchet-client'; + +type WebhookPayload = { event_id: string; type: string; data: Record }; + +// > Step 01 Define Webhook Task +const processWebhook = hatchet.task({ + name: 'process-webhook', + onEvents: ['webhook:stripe', 'webhook:github'], + fn: async (input) => ({ + processed: input.event_id, + type: input.type, + }), +}); +// !! + +// > Step 02 Register Webhook +// Call from your webhook endpoint to trigger the task. +function forwardWebhook(eventKey: string, payload: WebhookPayload) { + hatchet.event.push(eventKey, payload); +} +// forwardWebhook('webhook:stripe', { event_id: 'evt_123', type: 'payment', data: {} }); +// !! + +// > Step 03 Process Payload +// Validate event_id for deduplication; process idempotently. +function validateAndProcess(input: WebhookPayload) { + if (!input.event_id) throw new Error('event_id required for deduplication'); + return { processed: input.event_id, type: input.type }; +} +// !! + +export { processWebhook };