diff --git a/agents-api/agents_api/autogen/Tasks.py b/agents-api/agents_api/autogen/Tasks.py
index add03d675..8738bc62a 100644
--- a/agents-api/agents_api/autogen/Tasks.py
+++ b/agents-api/agents_api/autogen/Tasks.py
@@ -9,6 +9,12 @@
 from pydantic import AwareDatetime, BaseModel, ConfigDict, Field
 
 from .Chat import ChatSettings
+from .Docs import (
+    EmbedQueryRequest,
+    HybridDocSearchRequest,
+    TextOnlyDocSearchRequest,
+    VectorDocSearchRequest,
+)
 from .Entries import InputChatMLMessage
 from .Tools import CreateToolRequest
 
@@ -18,13 +24,58 @@ class BaseWorkflowStep(BaseModel):
         populate_by_name=True,
     )
     kind_: Literal[
-        "tool_call", "yield", "prompt", "evaluate", "if_else", "wait_for_input", "error"
+        "tool_call",
+        "prompt",
+        "evaluate",
+        "wait_for_input",
+        "log",
+        "embed",
+        "search",
+        "set",
+        "get",
+        "foreach",
+        "map_reduce",
+        "parallel",
+        "switch",
+        "if_else",
+        "sleep",
+        "return",
+        "yield",
+        "error",
     ]
     """
     The kind of step
     """
 
 
+class CaseThen(BaseModel):
+    model_config = ConfigDict(
+        populate_by_name=True,
+    )
+    case: str
+    """
+    The condition to evaluate
+    """
+    then: (
+        Any
+        | ToolCallStep
+        | YieldStep
+        | PromptStep
+        | ErrorWorkflowStep
+        | SleepStep
+        | ReturnStep
+        | GetStep
+        | SetStep
+        | LogStep
+        | EmbedStep
+        | SearchStep
+        | WaitForInputStep
+    )
+    """
+    The steps to run if the condition is true
+    """
+
+
 class CreateTaskRequest(BaseModel):
     """
     Payload for creating a task
@@ -41,8 +92,19 @@ class CreateTaskRequest(BaseModel):
         | YieldStep
         | PromptStep
         | ErrorWorkflowStep
+        | SleepStep
+        | ReturnStep
+        | GetStep
+        | SetStep
+        | LogStep
+        | EmbedStep
+        | SearchStep
         | WaitForInputStep
         | IfElseWorkflowStep
+        | SwitchStep
+        | ForeachStep
+        | ParallelStep
+        | MapReduceStep
     ]
     """
     The entrypoint of the task.
@@ -62,6 +124,17 @@ class CreateTaskRequest(BaseModel):
     metadata: dict[str, Any] | None = None
 
 
+class EmbedStep(BaseWorkflowStep):
+    model_config = ConfigDict(
+        populate_by_name=True,
+    )
+    kind_: Literal["embed"] = "embed"
+    embed: EmbedQueryRequest
+    """
+    The text to embed
+    """
+
+
 class ErrorWorkflowStep(BaseWorkflowStep):
     model_config = ConfigDict(
         populate_by_name=True,
@@ -84,6 +157,56 @@ class EvaluateStep(BaseWorkflowStep):
     """
 
 
+class ForeachDo(BaseModel):
+    model_config = ConfigDict(
+        populate_by_name=True,
+    )
+    in_: Annotated[str, Field(alias="in")]
+    """
+    The variable to iterate over
+    """
+    do: list[
+        Any
+        | ToolCallStep
+        | YieldStep
+        | PromptStep
+        | ErrorWorkflowStep
+        | SleepStep
+        | ReturnStep
+        | GetStep
+        | SetStep
+        | LogStep
+        | EmbedStep
+        | SearchStep
+        | WaitForInputStep
+    ]
+    """
+    The steps to run for each iteration
+    """
+
+
+class ForeachStep(BaseWorkflowStep):
+    model_config = ConfigDict(
+        populate_by_name=True,
+    )
+    kind_: Literal["foreach"] = "foreach"
+    foreach: ForeachDo
+    """
+    The steps to run for each iteration
+    """
+
+
+class GetStep(BaseWorkflowStep):
+    model_config = ConfigDict(
+        populate_by_name=True,
+    )
+    kind_: Literal["get"] = "get"
+    get: str
+    """
+    The key to get
+    """
+
+
 class IfElseWorkflowStep(BaseWorkflowStep):
     model_config = ConfigDict(
         populate_by_name=True,
@@ -99,6 +222,13 @@ class IfElseWorkflowStep(BaseWorkflowStep):
         | YieldStep
         | PromptStep
         | ErrorWorkflowStep
+        | SleepStep
+        | ReturnStep
+        | GetStep
+        | SetStep
+        | LogStep
+        | EmbedStep
+        | SearchStep
         | WaitForInputStep
     )
     """
@@ -110,6 +240,13 @@ class IfElseWorkflowStep(BaseWorkflowStep):
         | YieldStep
         | PromptStep
         | ErrorWorkflowStep
+        | SleepStep
+        | ReturnStep
+        | GetStep
+        | SetStep
+        | LogStep
+        | EmbedStep
+        | SearchStep
         | WaitForInputStep,
         Field(alias="else"),
     ]
@@ -118,6 +255,71 @@ class IfElseWorkflowStep(BaseWorkflowStep):
     """
 
 
+class LogStep(BaseWorkflowStep):
+    model_config = ConfigDict(
+        populate_by_name=True,
+    )
+    kind_: Literal["log"] = "log"
+    log: str
+    """
+    The value to log
+    """
+
+
+class MapOver(BaseModel):
+    model_config = ConfigDict(
+        populate_by_name=True,
+    )
+    over: str
+    """
+    The variable to iterate over
+    """
+    workflow: str
+    """
+    The subworkflow to run for each iteration
+    """
+
+
+class MapReduceStep(BaseWorkflowStep):
+    model_config = ConfigDict(
+        populate_by_name=True,
+    )
+    kind_: Literal["map_reduce"] = "map_reduce"
+    map: MapOver
+    """
+    The steps to run for each iteration
+    """
+    reduce: str
+    """
+    The expression to reduce the results (`_` is a list of outputs)
+    """
+
+
+class ParallelStep(BaseWorkflowStep):
+    model_config = ConfigDict(
+        populate_by_name=True,
+    )
+    kind_: Literal["parallel"] = "parallel"
+    parallel: list[
+        Any
+        | ToolCallStep
+        | YieldStep
+        | PromptStep
+        | ErrorWorkflowStep
+        | SleepStep
+        | ReturnStep
+        | GetStep
+        | SetStep
+        | LogStep
+        | EmbedStep
+        | SearchStep
+        | WaitForInputStep
+    ]
+    """
+    The steps to run in parallel. Max concurrency will depend on the platform
+    """
+
+
 class PatchTaskRequest(BaseModel):
     """
     Payload for patching a task
@@ -134,8 +336,19 @@ class PatchTaskRequest(BaseModel):
         | YieldStep
         | PromptStep
         | ErrorWorkflowStep
+        | SleepStep
+        | ReturnStep
+        | GetStep
+        | SetStep
+        | LogStep
+        | EmbedStep
+        | SearchStep
         | WaitForInputStep
         | IfElseWorkflowStep
+        | SwitchStep
+        | ForeachStep
+        | ParallelStep
+        | MapReduceStep
     ]
     | None
     ) = None
@@ -172,6 +385,97 @@ class PromptStep(BaseWorkflowStep):
     """
 
 
+class ReturnStep(BaseWorkflowStep):
+    model_config = ConfigDict(
+        populate_by_name=True,
+    )
+    kind_: Literal["return"] = "return"
+    return_: Annotated[dict[str, str], Field(alias="return")]
+    """
+    The value to return
+    """
+
+
+class SearchStep(BaseWorkflowStep):
+    model_config = ConfigDict(
+        populate_by_name=True,
+    )
+    kind_: Literal["search"] = "search"
+    search: VectorDocSearchRequest | TextOnlyDocSearchRequest | HybridDocSearchRequest
+    """
+    The search query
+    """
+
+
+class SetKey(BaseModel):
+    model_config = ConfigDict(
+        populate_by_name=True,
+    )
+    key: str
+    """
+    The key to set
+    """
+    value: str
+    """
+    The value to set
+    """
+
+
+class SetStep(BaseWorkflowStep):
+    model_config = ConfigDict(
+        populate_by_name=True,
+    )
+    kind_: Literal["set"] = "set"
+    set: SetKey | list[SetKey]
+    """
+    The value to set
+    """
+
+
+class SleepFor(BaseModel):
+    model_config = ConfigDict(
+        populate_by_name=True,
+    )
+    seconds: Annotated[int, Field(0, ge=0)]
+    """
+    The number of seconds to sleep for
+    """
+    minutes: Annotated[int, Field(0, ge=0)]
+    """
+    The number of minutes to sleep for
+    """
+    hours: Annotated[int, Field(0, ge=0)]
+    """
+    The number of hours to sleep for
+    """
+    days: Annotated[int, Field(0, ge=0)]
+    """
+    The number of days to sleep for
+    """
+
+
+class SleepStep(BaseWorkflowStep):
+    model_config = ConfigDict(
+        populate_by_name=True,
+    )
+    kind_: Literal["sleep"] = "sleep"
+    sleep: SleepFor
+    """
+    The duration to sleep for
+    """
+
+
+class SwitchStep(BaseWorkflowStep):
+    model_config = ConfigDict(
+        populate_by_name=True,
+    )
+    kind_: Literal["switch"] = "switch"
+    switch: list[CaseThen]
+    """
+    The cond tree
+    """
+
+
 class Task(BaseModel):
     """
     Object describing a Task
@@ -188,8 +492,19 @@ class Task(BaseModel):
         | YieldStep
         | PromptStep
         | ErrorWorkflowStep
+        | SleepStep
+        | ReturnStep
+        | GetStep
+        | SetStep
+ | LogStep + | EmbedStep + | SearchStep | WaitForInputStep | IfElseWorkflowStep + | SwitchStep + | ForeachStep + | ParallelStep + | MapReduceStep ] """ The entrypoint of the task. @@ -260,8 +575,19 @@ class UpdateTaskRequest(BaseModel): | YieldStep | PromptStep | ErrorWorkflowStep + | SleepStep + | ReturnStep + | GetStep + | SetStep + | LogStep + | EmbedStep + | SearchStep | WaitForInputStep | IfElseWorkflowStep + | SwitchStep + | ForeachStep + | ParallelStep + | MapReduceStep ] """ The entrypoint of the task. diff --git a/agents-api/poetry.lock b/agents-api/poetry.lock index aaaada66e..48131f6e8 100644 --- a/agents-api/poetry.lock +++ b/agents-api/poetry.lock @@ -2,98 +2,98 @@ [[package]] name = "aiohappyeyeballs" -version = "2.3.5" +version = "2.3.7" description = "Happy Eyeballs for asyncio" optional = false python-versions = ">=3.8" files = [ - {file = "aiohappyeyeballs-2.3.5-py3-none-any.whl", hash = "sha256:4d6dea59215537dbc746e93e779caea8178c866856a721c9c660d7a5a7b8be03"}, - {file = "aiohappyeyeballs-2.3.5.tar.gz", hash = "sha256:6fa48b9f1317254f122a07a131a86b71ca6946ca989ce6326fff54a99a920105"}, + {file = "aiohappyeyeballs-2.3.7-py3-none-any.whl", hash = "sha256:337ce4dc0e99eb697c3c5a77d6cb3c52925824d9a67ac0dea7c55b8a2d60b222"}, + {file = "aiohappyeyeballs-2.3.7.tar.gz", hash = "sha256:e794cd29ba6a14078092984e43688212a19081de3a73b6796c2fdeb3706dd6ce"}, ] [[package]] name = "aiohttp" -version = "3.10.3" +version = "3.10.4" description = "Async http client/server framework (asyncio)" optional = false python-versions = ">=3.8" files = [ - {file = "aiohttp-3.10.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cc36cbdedf6f259371dbbbcaae5bb0e95b879bc501668ab6306af867577eb5db"}, - {file = "aiohttp-3.10.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:85466b5a695c2a7db13eb2c200af552d13e6a9313d7fa92e4ffe04a2c0ea74c1"}, - {file = "aiohttp-3.10.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:71bb1d97bfe7e6726267cea169fdf5df7658831bb68ec02c9c6b9f3511e108bb"}, - {file = "aiohttp-3.10.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:baec1eb274f78b2de54471fc4c69ecbea4275965eab4b556ef7a7698dee18bf2"}, - {file = "aiohttp-3.10.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:13031e7ec1188274bad243255c328cc3019e36a5a907978501256000d57a7201"}, - {file = "aiohttp-3.10.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2bbc55a964b8eecb341e492ae91c3bd0848324d313e1e71a27e3d96e6ee7e8e8"}, - {file = "aiohttp-3.10.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e8cc0564b286b625e673a2615ede60a1704d0cbbf1b24604e28c31ed37dc62aa"}, - {file = "aiohttp-3.10.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f817a54059a4cfbc385a7f51696359c642088710e731e8df80d0607193ed2b73"}, - {file = "aiohttp-3.10.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:8542c9e5bcb2bd3115acdf5adc41cda394e7360916197805e7e32b93d821ef93"}, - {file = "aiohttp-3.10.3-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:671efce3a4a0281060edf9a07a2f7e6230dca3a1cbc61d110eee7753d28405f7"}, - {file = "aiohttp-3.10.3-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:0974f3b5b0132edcec92c3306f858ad4356a63d26b18021d859c9927616ebf27"}, - {file = "aiohttp-3.10.3-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:44bb159b55926b57812dca1b21c34528e800963ffe130d08b049b2d6b994ada7"}, - {file = "aiohttp-3.10.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash 
= "sha256:6ae9ae382d1c9617a91647575255ad55a48bfdde34cc2185dd558ce476bf16e9"}, - {file = "aiohttp-3.10.3-cp310-cp310-win32.whl", hash = "sha256:aed12a54d4e1ee647376fa541e1b7621505001f9f939debf51397b9329fd88b9"}, - {file = "aiohttp-3.10.3-cp310-cp310-win_amd64.whl", hash = "sha256:b51aef59370baf7444de1572f7830f59ddbabd04e5292fa4218d02f085f8d299"}, - {file = "aiohttp-3.10.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e021c4c778644e8cdc09487d65564265e6b149896a17d7c0f52e9a088cc44e1b"}, - {file = "aiohttp-3.10.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:24fade6dae446b183e2410a8628b80df9b7a42205c6bfc2eff783cbeedc224a2"}, - {file = "aiohttp-3.10.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bc8e9f15939dacb0e1f2d15f9c41b786051c10472c7a926f5771e99b49a5957f"}, - {file = "aiohttp-3.10.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5a9ec959b5381271c8ec9310aae1713b2aec29efa32e232e5ef7dcca0df0279"}, - {file = "aiohttp-3.10.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2a5d0ea8a6467b15d53b00c4e8ea8811e47c3cc1bdbc62b1aceb3076403d551f"}, - {file = "aiohttp-3.10.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c9ed607dbbdd0d4d39b597e5bf6b0d40d844dfb0ac6a123ed79042ef08c1f87e"}, - {file = "aiohttp-3.10.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3e66d5b506832e56add66af88c288c1d5ba0c38b535a1a59e436b300b57b23e"}, - {file = "aiohttp-3.10.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fda91ad797e4914cca0afa8b6cccd5d2b3569ccc88731be202f6adce39503189"}, - {file = "aiohttp-3.10.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:61ccb867b2f2f53df6598eb2a93329b5eee0b00646ee79ea67d68844747a418e"}, - {file = "aiohttp-3.10.3-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:6d881353264e6156f215b3cb778c9ac3184f5465c2ece5e6fce82e68946868ef"}, - {file = "aiohttp-3.10.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:b031ce229114825f49cec4434fa844ccb5225e266c3e146cb4bdd025a6da52f1"}, - {file = "aiohttp-3.10.3-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:5337cc742a03f9e3213b097abff8781f79de7190bbfaa987bd2b7ceb5bb0bdec"}, - {file = "aiohttp-3.10.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ab3361159fd3dcd0e48bbe804006d5cfb074b382666e6c064112056eb234f1a9"}, - {file = "aiohttp-3.10.3-cp311-cp311-win32.whl", hash = "sha256:05d66203a530209cbe40f102ebaac0b2214aba2a33c075d0bf825987c36f1f0b"}, - {file = "aiohttp-3.10.3-cp311-cp311-win_amd64.whl", hash = "sha256:70b4a4984a70a2322b70e088d654528129783ac1ebbf7dd76627b3bd22db2f17"}, - {file = "aiohttp-3.10.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:166de65e2e4e63357cfa8417cf952a519ac42f1654cb2d43ed76899e2319b1ee"}, - {file = "aiohttp-3.10.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:7084876352ba3833d5d214e02b32d794e3fd9cf21fdba99cff5acabeb90d9806"}, - {file = "aiohttp-3.10.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d98c604c93403288591d7d6d7d6cc8a63459168f8846aeffd5b3a7f3b3e5e09"}, - {file = "aiohttp-3.10.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d73b073a25a0bb8bf014345374fe2d0f63681ab5da4c22f9d2025ca3e3ea54fc"}, - {file = "aiohttp-3.10.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8da6b48c20ce78f5721068f383e0e113dde034e868f1b2f5ee7cb1e95f91db57"}, - {file = "aiohttp-3.10.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", 
hash = "sha256:3a9dcdccf50284b1b0dc72bc57e5bbd3cc9bf019060dfa0668f63241ccc16aa7"}, - {file = "aiohttp-3.10.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56fb94bae2be58f68d000d046172d8b8e6b1b571eb02ceee5535e9633dcd559c"}, - {file = "aiohttp-3.10.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bf75716377aad2c718cdf66451c5cf02042085d84522aec1f9246d3e4b8641a6"}, - {file = "aiohttp-3.10.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6c51ed03e19c885c8e91f574e4bbe7381793f56f93229731597e4a499ffef2a5"}, - {file = "aiohttp-3.10.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b84857b66fa6510a163bb083c1199d1ee091a40163cfcbbd0642495fed096204"}, - {file = "aiohttp-3.10.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c124b9206b1befe0491f48185fd30a0dd51b0f4e0e7e43ac1236066215aff272"}, - {file = "aiohttp-3.10.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:3461d9294941937f07bbbaa6227ba799bc71cc3b22c40222568dc1cca5118f68"}, - {file = "aiohttp-3.10.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:08bd0754d257b2db27d6bab208c74601df6f21bfe4cb2ec7b258ba691aac64b3"}, - {file = "aiohttp-3.10.3-cp312-cp312-win32.whl", hash = "sha256:7f9159ae530297f61a00116771e57516f89a3de6ba33f314402e41560872b50a"}, - {file = "aiohttp-3.10.3-cp312-cp312-win_amd64.whl", hash = "sha256:e1128c5d3a466279cb23c4aa32a0f6cb0e7d2961e74e9e421f90e74f75ec1edf"}, - {file = "aiohttp-3.10.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:d1100e68e70eb72eadba2b932b185ebf0f28fd2f0dbfe576cfa9d9894ef49752"}, - {file = "aiohttp-3.10.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a541414578ff47c0a9b0b8b77381ea86b0c8531ab37fc587572cb662ccd80b88"}, - {file = "aiohttp-3.10.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d5548444ef60bf4c7b19ace21f032fa42d822e516a6940d36579f7bfa8513f9c"}, - {file = "aiohttp-3.10.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ba2e838b5e6a8755ac8297275c9460e729dc1522b6454aee1766c6de6d56e5e"}, - {file = "aiohttp-3.10.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:48665433bb59144aaf502c324694bec25867eb6630fcd831f7a893ca473fcde4"}, - {file = "aiohttp-3.10.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bac352fceed158620ce2d701ad39d4c1c76d114255a7c530e057e2b9f55bdf9f"}, - {file = "aiohttp-3.10.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2b0f670502100cdc567188c49415bebba947eb3edaa2028e1a50dd81bd13363f"}, - {file = "aiohttp-3.10.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43b09f38a67679e32d380fe512189ccb0b25e15afc79b23fbd5b5e48e4fc8fd9"}, - {file = "aiohttp-3.10.3-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:cd788602e239ace64f257d1c9d39898ca65525583f0fbf0988bcba19418fe93f"}, - {file = "aiohttp-3.10.3-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:214277dcb07ab3875f17ee1c777d446dcce75bea85846849cc9d139ab8f5081f"}, - {file = "aiohttp-3.10.3-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:32007fdcaab789689c2ecaaf4b71f8e37bf012a15cd02c0a9db8c4d0e7989fa8"}, - {file = "aiohttp-3.10.3-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:123e5819bfe1b87204575515cf448ab3bf1489cdeb3b61012bde716cda5853e7"}, - {file = "aiohttp-3.10.3-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:812121a201f0c02491a5db335a737b4113151926a79ae9ed1a9f41ea225c0e3f"}, - {file = "aiohttp-3.10.3-cp38-cp38-win32.whl", hash = 
"sha256:b97dc9a17a59f350c0caa453a3cb35671a2ffa3a29a6ef3568b523b9113d84e5"}, - {file = "aiohttp-3.10.3-cp38-cp38-win_amd64.whl", hash = "sha256:3731a73ddc26969d65f90471c635abd4e1546a25299b687e654ea6d2fc052394"}, - {file = "aiohttp-3.10.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:38d91b98b4320ffe66efa56cb0f614a05af53b675ce1b8607cdb2ac826a8d58e"}, - {file = "aiohttp-3.10.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9743fa34a10a36ddd448bba8a3adc2a66a1c575c3c2940301bacd6cc896c6bf1"}, - {file = "aiohttp-3.10.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7c126f532caf238031c19d169cfae3c6a59129452c990a6e84d6e7b198a001dc"}, - {file = "aiohttp-3.10.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:926e68438f05703e500b06fe7148ef3013dd6f276de65c68558fa9974eeb59ad"}, - {file = "aiohttp-3.10.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:434b3ab75833accd0b931d11874e206e816f6e6626fd69f643d6a8269cd9166a"}, - {file = "aiohttp-3.10.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d35235a44ec38109b811c3600d15d8383297a8fab8e3dec6147477ec8636712a"}, - {file = "aiohttp-3.10.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59c489661edbd863edb30a8bd69ecb044bd381d1818022bc698ba1b6f80e5dd1"}, - {file = "aiohttp-3.10.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:50544fe498c81cb98912afabfc4e4d9d85e89f86238348e3712f7ca6a2f01dab"}, - {file = "aiohttp-3.10.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:09bc79275737d4dc066e0ae2951866bb36d9c6b460cb7564f111cc0427f14844"}, - {file = "aiohttp-3.10.3-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:af4dbec58e37f5afff4f91cdf235e8e4b0bd0127a2a4fd1040e2cad3369d2f06"}, - {file = "aiohttp-3.10.3-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:b22cae3c9dd55a6b4c48c63081d31c00fc11fa9db1a20c8a50ee38c1a29539d2"}, - {file = "aiohttp-3.10.3-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:ba562736d3fbfe9241dad46c1a8994478d4a0e50796d80e29d50cabe8fbfcc3f"}, - {file = "aiohttp-3.10.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:f25d6c4e82d7489be84f2b1c8212fafc021b3731abdb61a563c90e37cced3a21"}, - {file = "aiohttp-3.10.3-cp39-cp39-win32.whl", hash = "sha256:b69d832e5f5fa15b1b6b2c8eb6a9fd2c0ec1fd7729cb4322ed27771afc9fc2ac"}, - {file = "aiohttp-3.10.3-cp39-cp39-win_amd64.whl", hash = "sha256:673bb6e3249dc8825df1105f6ef74e2eab779b7ff78e96c15cadb78b04a83752"}, - {file = "aiohttp-3.10.3.tar.gz", hash = "sha256:21650e7032cc2d31fc23d353d7123e771354f2a3d5b05a5647fc30fea214e696"}, + {file = "aiohttp-3.10.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:81037ddda8cc0a95c6d8c1b9029d0b19a62db8770c0e239e3bea0109d294ab66"}, + {file = "aiohttp-3.10.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:71944d4f4090afc07ce96b7029d5a574240e2f39570450df4af0d5b93a5ee64a"}, + {file = "aiohttp-3.10.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c774f08afecc0a617966f45a9c378456e713a999ee60654d9727617def3e4ee4"}, + {file = "aiohttp-3.10.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc990e73613c78ab2930b60266135066f37fdfce6b32dd604f42c5c377ee880a"}, + {file = "aiohttp-3.10.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f6acd1a908740f708358d240f9a3243cec31a456e3ded65c2cb46f6043bc6735"}, + {file = "aiohttp-3.10.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:6075e27e7e54fbcd1c129c5699b2d251c885c9892e26d59a0fb7705141c2d14b"}, + {file = "aiohttp-3.10.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc98d93d11d860ac823beb6131f292d82efb76f226b5e28a3eab1ec578dfd041"}, + {file = "aiohttp-3.10.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:201ddf1471567568be381b6d4701e266a768f7eaa2f99ef753f2c9c5e1e3fb5c"}, + {file = "aiohttp-3.10.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:7d202ec55e61f06b1a1eaf317fba7546855cbf803c13ce7625d462fb8c88e238"}, + {file = "aiohttp-3.10.4-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:96b2e7c110a941c8c1a692703b8ac1013e47f17ee03356c71d55c0a54de2ce38"}, + {file = "aiohttp-3.10.4-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:8ba0fbc56c44883bd757ece433f9caadbca67f565934afe9bc53ba3bd99cc368"}, + {file = "aiohttp-3.10.4-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:46cc9069da466652bb7b8b3fac1f8ce2e12a9dc0fb11551faa420c4cdbc60abf"}, + {file = "aiohttp-3.10.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:93a19cd1e9dc703257fda78b8e889c3a08eabaa09f6ff0d867850b03964f80d1"}, + {file = "aiohttp-3.10.4-cp310-cp310-win32.whl", hash = "sha256:8593040bcc8075fc0e817a602bc5d3d74c7bd717619ffc175a8ba0188edebadf"}, + {file = "aiohttp-3.10.4-cp310-cp310-win_amd64.whl", hash = "sha256:326fb5228aadfc395981d9b336d56a698da335897c4143105c73b583d7500839"}, + {file = "aiohttp-3.10.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:dfe48f477e02ef5ab247c6ac431a6109c69b5c24cb3ccbcd3e27c4fb39691fe4"}, + {file = "aiohttp-3.10.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f6fe78b51852e25d4e20be51ef88c2a0bf31432b9f2223bdbd61c01a0f9253a7"}, + {file = "aiohttp-3.10.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5cc75ff5efbd92301e63a157fddb18a6964a3f40e31c77d57e97dbb9bb3373b4"}, + {file = "aiohttp-3.10.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dca39391f45fbb28daa6412f98c625265bf6b512cc41382df61672d1b242f8f4"}, + {file = "aiohttp-3.10.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8616dd5ed8b3b4029021b560305041c62e080bb28f238c27c2e150abe3539587"}, + {file = "aiohttp-3.10.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9d7958ba22854b3f00a7bbb66cde1dc759760ce8a3e6dfe9ea53f06bccaa9aa2"}, + {file = "aiohttp-3.10.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a24ac7164a824ef2e8e4e9a9f6debb1f43c44ad7ad04efc6018a6610555666d"}, + {file = "aiohttp-3.10.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:660ad010b8fd0b26e8edb8ae5c036db5b16baac4278198ad238b11956d920b3d"}, + {file = "aiohttp-3.10.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:93ee83008d3e505db9846a5a1f48a002676d8dcc90ee431a9462541c9b81393c"}, + {file = "aiohttp-3.10.4-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:77071795efd6ba87f409001141fb05c94ee962b9fca6c8fa1f735c2718512de4"}, + {file = "aiohttp-3.10.4-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:ff371ae72a1816c3eeba5c9cff42cb739aaa293fec7d78f180d1c7ee342285b6"}, + {file = "aiohttp-3.10.4-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:c253e81f12da97f85d45441e8c6da0d9c12e07db4a7136b0a955df6fc5e4bf51"}, + {file = "aiohttp-3.10.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:2ce101c447cf7ba4b6e5ab07bfa2c0da21cbab66922f78a601f0b84fd7710d72"}, + {file = 
"aiohttp-3.10.4-cp311-cp311-win32.whl", hash = "sha256:705c311ecf2d30fbcf3570d1a037c657be99095694223488140c47dee4ef2460"}, + {file = "aiohttp-3.10.4-cp311-cp311-win_amd64.whl", hash = "sha256:ebddbfea8a8d6b97f717658fa85a96681a28990072710d3de3a4eba5d6804a37"}, + {file = "aiohttp-3.10.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:fe4d63f42d9c604521b208b754abfafe01218af4a8f6332b43196ee8fe88bbd5"}, + {file = "aiohttp-3.10.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fef7b7bd3a6911b4d148332136d34d3c2aee3d54d354373b1da6d96bc08089a5"}, + {file = "aiohttp-3.10.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fff8606149098935188fe1e135f7e7991e6a36d6fe394fd15939fc57d0aff889"}, + {file = "aiohttp-3.10.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9eb3df1aa83602be9a5e572c834d74c3c8e382208b59a873aabfe4c493c45ed0"}, + {file = "aiohttp-3.10.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5c4a71d4a5e0cbfd4bfadd13cb84fe2bc76c64d550dc4f22c22008c9354cffb3"}, + {file = "aiohttp-3.10.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bf61884a604c399458c4a42c8caea000fbcc44255ed89577ff50cb688a0fe8e2"}, + {file = "aiohttp-3.10.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2015e4b40bd5dedc8155c2b2d24a2b07963ae02b5772373d0b599a68e38a316b"}, + {file = "aiohttp-3.10.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b06e1a66bf0a1a2d0f12aef25843dfd2093df080d6c1acbc43914bb9c8f36ed3"}, + {file = "aiohttp-3.10.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:eb898c9ad5a1228a669ebe2e2ba3d76aebe1f7c10b78f09a36000254f049fc2b"}, + {file = "aiohttp-3.10.4-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:2d64a5a7539320c3cecb4bca093ea825fcc906f8461cf8b42a7bf3c706ce1932"}, + {file = "aiohttp-3.10.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:438c6e1492d060b21285f4b6675b941cf96dd9ef3dfdd59940561029b82e3e1f"}, + {file = "aiohttp-3.10.4-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:e99bf118afb2584848dba169a685fe092b338a4fe52ae08c7243d7bc4cc204fe"}, + {file = "aiohttp-3.10.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:9dc26781fb95225c6170619dece8b5c6ca7cfb1b0be97b7ee719915773d0c2a9"}, + {file = "aiohttp-3.10.4-cp312-cp312-win32.whl", hash = "sha256:45bb655cb8b3a61e19977183a4e0962051ae90f6d46588ed4addb8232128141c"}, + {file = "aiohttp-3.10.4-cp312-cp312-win_amd64.whl", hash = "sha256:347bbdc48411badc24fe3a13565820bc742db3aa2f9127cd5f48c256caf87e29"}, + {file = "aiohttp-3.10.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:4ad284cee0fdcdc0216346b849fd53d201b510aff3c48aa3622daec9ada4bf80"}, + {file = "aiohttp-3.10.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:58df59234be7d7e80548b9482ebfeafdda21948c25cb2873c7f23870c8053dfe"}, + {file = "aiohttp-3.10.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5f52225af7f91f27b633f73473e9ef0aa8e2112d57b69eaf3aa4479e3ea3bc0e"}, + {file = "aiohttp-3.10.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:93f1a0e12c321d923c024b56d7dcd8012e60bf30a4b3fb69a88be15dcb9ab80b"}, + {file = "aiohttp-3.10.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9e9e9a51dd12f2f71fdbd7f7230dcb75ed8f77d8ac8e07c73b599b6d7027e5c"}, + {file = "aiohttp-3.10.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:38bb515f1affc36d3d97b02bf82099925a5785c4a96066ff4400a83ad09d3d5d"}, + {file = 
"aiohttp-3.10.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e685afb0e3b7b861d89cb3690d89eeda221b43095352efddaaa735c6baf87f3"}, + {file = "aiohttp-3.10.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd5673e3391564871ba6753cf674dcf2051ef19dc508998fe0758a6c7b429a0"}, + {file = "aiohttp-3.10.4-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:4b34e5086e1ead3baa740e32adf35cc5e42338e44c4b07f7b62b41ca6d6a5bfd"}, + {file = "aiohttp-3.10.4-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:c3fd3b8f0164fb2866400cd6eb9e884ab0dc95f882cf8b25e560ace7350c552d"}, + {file = "aiohttp-3.10.4-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:b95e1694d234f27b4bbf5bdef56bb751974ac5dbe045b1e462bde1fe39421cbe"}, + {file = "aiohttp-3.10.4-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:c031de4dfabe7bb6565743745ab43d20588944ddfc7233360169cab4008eee2f"}, + {file = "aiohttp-3.10.4-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:03c5a3143d4a82c43a3d82ac77d9cdef527a72f1c04dcca7b14770879f33d196"}, + {file = "aiohttp-3.10.4-cp38-cp38-win32.whl", hash = "sha256:b71722b527445e02168e2d1cf435772731874671a647fa159ad000feea7933b6"}, + {file = "aiohttp-3.10.4-cp38-cp38-win_amd64.whl", hash = "sha256:0fd1f57aac7d01c9c768675d531976d20d5b79d9da67fac87e55d41b4ade05f9"}, + {file = "aiohttp-3.10.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:15b36a644d1f44ea3d94a0bbb71e75d5f394a3135dc388a209466e22b711ce64"}, + {file = "aiohttp-3.10.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:394ddf9d216cf0bd429b223239a0ab628f01a7a1799c93ce4685eedcdd51b9bc"}, + {file = "aiohttp-3.10.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:dd33f4d571b4143fc9318c3d9256423579c7d183635acc458a6db81919ae5204"}, + {file = "aiohttp-3.10.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e5991b80886655e6c785aadf3114d4f86e6bec2da436e2bb62892b9f048450a4"}, + {file = "aiohttp-3.10.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:92021bf0a4b9ad16851a6c1ca3c86e5b09aecca4f7a2576430c6bbf3114922b1"}, + {file = "aiohttp-3.10.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:938e37fd337343c67471098736deb33066d72cec7d8927b9c1b6b4ea807ade9e"}, + {file = "aiohttp-3.10.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1d697023b16c62f9aeb3ffdfb8ec4ac3afd477388993b9164b47dadbd60e7062"}, + {file = "aiohttp-3.10.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c2f9f07fe6d0d51bd2a788cbb339f1570fd691449c53b5dec83ff838f117703e"}, + {file = "aiohttp-3.10.4-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:50ac670f3fc13ce95e4d6d5a299db9288cc84c663aa630142444ef504756fcf7"}, + {file = "aiohttp-3.10.4-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:9bcdd19398212785a9cb82a63a4b75a299998343f3f5732dfd37c1a4275463f9"}, + {file = "aiohttp-3.10.4-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:122c26f0976225aba46f381e3cabb5ef89a08af6503fc30493fb732e578cfa55"}, + {file = "aiohttp-3.10.4-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:d0665e2a346b6b66959f831ffffd8aa71dd07dd2300017d478f5b47573e66cfe"}, + {file = "aiohttp-3.10.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:625a4a9d4b9f80e7bbaaf2ace06341cf701b2fee54232843addf0bb7304597fb"}, + {file = "aiohttp-3.10.4-cp39-cp39-win32.whl", hash = "sha256:5115490112f39f16ae87c1b34dff3e2c95306cf456b1d2af5974c4ac7d2d1ec7"}, + {file = 
"aiohttp-3.10.4-cp39-cp39-win_amd64.whl", hash = "sha256:9b58b2ef7f28a2462ba86acbf3b20371bd80a1faa1cfd82f31968af4ac81ef25"}, + {file = "aiohttp-3.10.4.tar.gz", hash = "sha256:23a5f97e7dd22e181967fb6cb6c3b11653b0fdbbc4bb7739d9b6052890ccab96"}, ] [package.dependencies] @@ -759,19 +759,19 @@ develop = ["coverage", "invoke", "path.py", "pylint", "pytest (>=3.2)", "pytest- [[package]] name = "dask" -version = "2024.8.0" +version = "2024.8.1" description = "Parallel PyData with Task Scheduling" optional = false -python-versions = ">=3.9" +python-versions = ">=3.10" files = [ - {file = "dask-2024.8.0-py3-none-any.whl", hash = "sha256:250ea3df30d4a25958290eec4f252850091c6cfaed82d098179c3b25bba18309"}, - {file = "dask-2024.8.0.tar.gz", hash = "sha256:f1fec39373d2f101bc045529ad4e9b30e34e6eb33b7aa0fa7073aec7b1bf9eee"}, + {file = "dask-2024.8.1-py3-none-any.whl", hash = "sha256:b8b58cba91dc9c057c8676dcc80b8bc321602b4dfd21529d33b03b55d428e2c3"}, + {file = "dask-2024.8.1.tar.gz", hash = "sha256:4254e43ac8c3affad2b22952f126b00a00f52c87caae91c068d8e395a4ad1a72"}, ] [package.dependencies] click = ">=8.1" -cloudpickle = ">=1.5.0" -distributed = {version = "2024.8.0", optional = true, markers = "extra == \"distributed\""} +cloudpickle = ">=3.0.0" +distributed = {version = "2024.8.1", optional = true, markers = "extra == \"distributed\""} fsspec = ">=2021.09.0" importlib-metadata = {version = ">=4.13.0", markers = "python_version < \"3.12\""} packaging = ">=20.0" @@ -784,7 +784,7 @@ array = ["numpy (>=1.21)"] complete = ["dask[array,dataframe,diagnostics,distributed]", "lz4 (>=4.3.2)", "pyarrow (>=7.0)", "pyarrow-hotfix"] dataframe = ["dask-expr (>=1.1,<1.2)", "dask[array]", "pandas (>=2.0)"] diagnostics = ["bokeh (>=2.4.2)", "jinja2 (>=2.10.3)"] -distributed = ["distributed (==2024.8.0)"] +distributed = ["distributed (==2024.8.1)"] test = ["pandas[test]", "pre-commit", "pytest", "pytest-cov", "pytest-rerunfailures", "pytest-timeout", "pytest-xdist"] [[package]] @@ -896,30 +896,30 @@ files = [ [[package]] name = "distributed" -version = "2024.8.0" +version = "2024.8.1" description = "Distributed scheduler for Dask" optional = false -python-versions = ">=3.9" +python-versions = ">=3.10" files = [ - {file = "distributed-2024.8.0-py3-none-any.whl", hash = "sha256:11af55d22dd6e04eb868b87f166b8f59ef1b300f659f87c016643b7f98280ec6"}, - {file = "distributed-2024.8.0.tar.gz", hash = "sha256:b99caf0a7f257f59477a70a334e081c1241f7cd9860211cc669742e6450e1310"}, + {file = "distributed-2024.8.1-py3-none-any.whl", hash = "sha256:03f5d3fe7a407cdc16dd2bc25dff4900b72f8dee896b7174eebe8a10b42d8c06"}, + {file = "distributed-2024.8.1.tar.gz", hash = "sha256:82394ceb68b91118717148dbe182cff679f32621812bd7b2bc27eaaa8589f962"}, ] [package.dependencies] click = ">=8.0" -cloudpickle = ">=1.5.0" -dask = "2024.8.0" +cloudpickle = ">=2.0.0" +dask = "2024.8.1" jinja2 = ">=2.10.3" locket = ">=1.0.0" -msgpack = ">=1.0.0" +msgpack = ">=1.0.2" packaging = ">=20.0" -psutil = ">=5.7.2" -pyyaml = ">=5.3.1" +psutil = ">=5.8.0" +pyyaml = ">=5.4.1" sortedcontainers = ">=2.0.5" tblib = ">=1.6.0" -toolz = ">=0.10.0" -tornado = ">=6.0.4" -urllib3 = ">=1.24.3" +toolz = ">=0.11.2" +tornado = ">=6.2.0" +urllib3 = ">=1.26.5" zict = ">=3.0.0" [[package]] @@ -2149,13 +2149,13 @@ tenacity = ">=8.1.0,<8.4.0 || >8.4.0,<9.0.0" [[package]] name = "langchain-core" -version = "0.2.32" +version = "0.2.33" description = "Building applications with LLMs through composability" optional = false python-versions = "<4.0,>=3.8.1" files = [ - {file = 
"langchain_core-0.2.32-py3-none-any.whl", hash = "sha256:1f5584cf0034909e35ea17010a847d4079417e0ddcb5a9eb3fbb2bd55f3268c0"}, - {file = "langchain_core-0.2.32.tar.gz", hash = "sha256:d82cdc350bbbe74261330d87056b7d9f1fb567828e9e03f708d23a48b941819e"}, + {file = "langchain_core-0.2.33-py3-none-any.whl", hash = "sha256:c8de411336c13fa440b7a52895bfd1c064f04d315344855962988483902cc532"}, + {file = "langchain_core-0.2.33.tar.gz", hash = "sha256:dd2659e0a560fc987b210107bf989aa14a6f4b67dd214c13a2c9669036cda975"}, ] [package.dependencies] @@ -2169,17 +2169,17 @@ typing-extensions = ">=4.7" [[package]] name = "langchain-openai" -version = "0.1.21" +version = "0.1.22" description = "An integration package connecting OpenAI and LangChain" optional = false python-versions = "<4.0,>=3.8.1" files = [ - {file = "langchain_openai-0.1.21-py3-none-any.whl", hash = "sha256:44420f0c84859ae236a80c8ac8754a16d5b660c24377c27ba98308145d346352"}, - {file = "langchain_openai-0.1.21.tar.gz", hash = "sha256:2c65feaf12bb284eccf7bce35725fd06f3035fa751babad6aa84af2f99867f88"}, + {file = "langchain_openai-0.1.22-py3-none-any.whl", hash = "sha256:e184ab867a30f803dc210a388537186b1b670a33d910a7e0fa4e0329d3b6c654"}, + {file = "langchain_openai-0.1.22.tar.gz", hash = "sha256:0cf93133f230a893e3b0cc2a792bbf2580950e879b577f6e8d4ff9963a7de44b"}, ] [package.dependencies] -langchain-core = ">=0.2.29,<0.3.0" +langchain-core = ">=0.2.33,<0.3.0" openai = ">=1.40.0,<2.0.0" tiktoken = ">=0.7,<1" @@ -2255,13 +2255,13 @@ dev = ["Sphinx (>=5.1.1)", "black (==23.12.1)", "build (>=0.10.0)", "coverage (> [[package]] name = "litellm" -version = "1.43.13" +version = "1.43.17" description = "Library to easily interface with LLM API providers" optional = false python-versions = "!=2.7.*,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,!=3.7.*,>=3.8" files = [ - {file = "litellm-1.43.13-py3-none-any.whl", hash = "sha256:47c27c1c1b394d6098c68eec637008b07a254dadc4b82206b1a9f960621a8776"}, - {file = "litellm-1.43.13.tar.gz", hash = "sha256:b0273cbed3f7a35f197c98d92b1a13038b430e5e78d30db7d94d8237a3b98641"}, + {file = "litellm-1.43.17-py3-none-any.whl", hash = "sha256:f5d68c812f087b49266631e09ae78b48b3ea03cd2e04e7760162a5919c5ccec7"}, + {file = "litellm-1.43.17.tar.gz", hash = "sha256:8ac82b18bf6ae7c29627e8e5d89b183f075b32fb7027b17d2fb7d7d0b7cf8b7f"}, ] [package.dependencies] @@ -2867,13 +2867,13 @@ files = [ [[package]] name = "openai" -version = "1.40.8" +version = "1.41.0" description = "The official Python library for the openai API" optional = false python-versions = ">=3.7.1" files = [ - {file = "openai-1.40.8-py3-none-any.whl", hash = "sha256:3ed4ddad48e0dde059c9b4d3dc240e47781beca2811e52ba449ddc4a471a2fd4"}, - {file = "openai-1.40.8.tar.gz", hash = "sha256:e225f830b946378e214c5b2cfa8df28ba2aeb7e9d44f738cb2a926fd971f5bc0"}, + {file = "openai-1.41.0-py3-none-any.whl", hash = "sha256:3b6cca4571667f3e0800442ef8f2bfa6a6f3301c51776bc7626159a4d81c242c"}, + {file = "openai-1.41.0.tar.gz", hash = "sha256:26b81f39b49dce92ff5d30c373625ddb212c2f1050e1574e456d18423730cdd0"}, ] [package.dependencies] @@ -4798,13 +4798,13 @@ test = ["argcomplete (>=3.0.3)", "mypy (>=1.7.0)", "pre-commit", "pytest (>=7.0, [[package]] name = "typer" -version = "0.12.3" +version = "0.12.4" description = "Typer, build great CLIs. Easy to code. Based on Python type hints." 
optional = false python-versions = ">=3.7" files = [ - {file = "typer-0.12.3-py3-none-any.whl", hash = "sha256:070d7ca53f785acbccba8e7d28b08dcd88f79f1fbda035ade0aecec71ca5c914"}, - {file = "typer-0.12.3.tar.gz", hash = "sha256:49e73131481d804288ef62598d97a1ceef3058905aa536a1134f90891ba35482"}, + {file = "typer-0.12.4-py3-none-any.whl", hash = "sha256:819aa03699f438397e876aa12b0d63766864ecba1b579092cc9fe35d886e34b6"}, + {file = "typer-0.12.4.tar.gz", hash = "sha256:c9c1613ed6a166162705b3347b8d10b661ccc5d95692654d0fb628118f2c34e6"}, ] [package.dependencies] diff --git a/sdks/python/julep/api/__init__.py b/sdks/python/julep/api/__init__.py index 69e133f76..a884514b0 100644 --- a/sdks/python/julep/api/__init__.py +++ b/sdks/python/julep/api/__init__.py @@ -126,49 +126,114 @@ TaskExecutionsRouteListRequestSortBy, TaskExecutionsRouteListResponse, TasksBaseWorkflowStep, + TasksCaseThen, + TasksCaseThenThen, TasksCreateTaskRequest, TasksCreateTaskRequestMainItem, + TasksCreateTaskRequestMainItem_Embed, TasksCreateTaskRequestMainItem_Error, TasksCreateTaskRequestMainItem_Evaluate, + TasksCreateTaskRequestMainItem_Foreach, + TasksCreateTaskRequestMainItem_Get, TasksCreateTaskRequestMainItem_IfElse, + TasksCreateTaskRequestMainItem_Log, + TasksCreateTaskRequestMainItem_MapReduce, + TasksCreateTaskRequestMainItem_Parallel, TasksCreateTaskRequestMainItem_Prompt, + TasksCreateTaskRequestMainItem_Return, + TasksCreateTaskRequestMainItem_Search, + TasksCreateTaskRequestMainItem_Set, + TasksCreateTaskRequestMainItem_Sleep, + TasksCreateTaskRequestMainItem_Switch, TasksCreateTaskRequestMainItem_ToolCall, TasksCreateTaskRequestMainItem_WaitForInput, TasksCreateTaskRequestMainItem_Yield, + TasksEmbedStep, TasksErrorWorkflowStep, TasksEvaluateStep, + TasksForeachDo, + TasksForeachDoDoItem, + TasksForeachStep, + TasksGetStep, TasksIfElseWorkflowStep, TasksIfElseWorkflowStepElse, TasksIfElseWorkflowStepThen, + TasksLogStep, + TasksMapOver, + TasksMapReduceStep, + TasksParallelStep, + TasksParallelStepParallelItem, TasksPatchTaskRequestMainItem, + TasksPatchTaskRequestMainItem_Embed, TasksPatchTaskRequestMainItem_Error, TasksPatchTaskRequestMainItem_Evaluate, + TasksPatchTaskRequestMainItem_Foreach, + TasksPatchTaskRequestMainItem_Get, TasksPatchTaskRequestMainItem_IfElse, + TasksPatchTaskRequestMainItem_Log, + TasksPatchTaskRequestMainItem_MapReduce, + TasksPatchTaskRequestMainItem_Parallel, TasksPatchTaskRequestMainItem_Prompt, + TasksPatchTaskRequestMainItem_Return, + TasksPatchTaskRequestMainItem_Search, + TasksPatchTaskRequestMainItem_Set, + TasksPatchTaskRequestMainItem_Sleep, + TasksPatchTaskRequestMainItem_Switch, TasksPatchTaskRequestMainItem_ToolCall, TasksPatchTaskRequestMainItem_WaitForInput, TasksPatchTaskRequestMainItem_Yield, TasksPromptStep, TasksPromptStepPrompt, + TasksReturnStep, TasksRouteListRequestDirection, TasksRouteListRequestSortBy, TasksRouteListResponse, + TasksSearchStep, + TasksSearchStepSearch, + TasksSetKey, + TasksSetStep, + TasksSetStepSet, + TasksSleepFor, + TasksSleepStep, + TasksSwitchStep, TasksTask, TasksTaskMainItem, + TasksTaskMainItem_Embed, TasksTaskMainItem_Error, TasksTaskMainItem_Evaluate, + TasksTaskMainItem_Foreach, + TasksTaskMainItem_Get, TasksTaskMainItem_IfElse, + TasksTaskMainItem_Log, + TasksTaskMainItem_MapReduce, + TasksTaskMainItem_Parallel, TasksTaskMainItem_Prompt, + TasksTaskMainItem_Return, + TasksTaskMainItem_Search, + TasksTaskMainItem_Set, + TasksTaskMainItem_Sleep, + TasksTaskMainItem_Switch, TasksTaskMainItem_ToolCall, 
TasksTaskMainItem_WaitForInput, TasksTaskMainItem_Yield, TasksTaskTool, TasksToolCallStep, TasksUpdateTaskRequestMainItem, + TasksUpdateTaskRequestMainItem_Embed, TasksUpdateTaskRequestMainItem_Error, TasksUpdateTaskRequestMainItem_Evaluate, + TasksUpdateTaskRequestMainItem_Foreach, + TasksUpdateTaskRequestMainItem_Get, TasksUpdateTaskRequestMainItem_IfElse, + TasksUpdateTaskRequestMainItem_Log, + TasksUpdateTaskRequestMainItem_MapReduce, + TasksUpdateTaskRequestMainItem_Parallel, TasksUpdateTaskRequestMainItem_Prompt, + TasksUpdateTaskRequestMainItem_Return, + TasksUpdateTaskRequestMainItem_Search, + TasksUpdateTaskRequestMainItem_Set, + TasksUpdateTaskRequestMainItem_Sleep, + TasksUpdateTaskRequestMainItem_Switch, TasksUpdateTaskRequestMainItem_ToolCall, TasksUpdateTaskRequestMainItem_WaitForInput, TasksUpdateTaskRequestMainItem_Yield, @@ -329,49 +394,114 @@ "TaskExecutionsRouteListRequestSortBy", "TaskExecutionsRouteListResponse", "TasksBaseWorkflowStep", + "TasksCaseThen", + "TasksCaseThenThen", "TasksCreateTaskRequest", "TasksCreateTaskRequestMainItem", + "TasksCreateTaskRequestMainItem_Embed", "TasksCreateTaskRequestMainItem_Error", "TasksCreateTaskRequestMainItem_Evaluate", + "TasksCreateTaskRequestMainItem_Foreach", + "TasksCreateTaskRequestMainItem_Get", "TasksCreateTaskRequestMainItem_IfElse", + "TasksCreateTaskRequestMainItem_Log", + "TasksCreateTaskRequestMainItem_MapReduce", + "TasksCreateTaskRequestMainItem_Parallel", "TasksCreateTaskRequestMainItem_Prompt", + "TasksCreateTaskRequestMainItem_Return", + "TasksCreateTaskRequestMainItem_Search", + "TasksCreateTaskRequestMainItem_Set", + "TasksCreateTaskRequestMainItem_Sleep", + "TasksCreateTaskRequestMainItem_Switch", "TasksCreateTaskRequestMainItem_ToolCall", "TasksCreateTaskRequestMainItem_WaitForInput", "TasksCreateTaskRequestMainItem_Yield", + "TasksEmbedStep", "TasksErrorWorkflowStep", "TasksEvaluateStep", + "TasksForeachDo", + "TasksForeachDoDoItem", + "TasksForeachStep", + "TasksGetStep", "TasksIfElseWorkflowStep", "TasksIfElseWorkflowStepElse", "TasksIfElseWorkflowStepThen", + "TasksLogStep", + "TasksMapOver", + "TasksMapReduceStep", + "TasksParallelStep", + "TasksParallelStepParallelItem", "TasksPatchTaskRequestMainItem", + "TasksPatchTaskRequestMainItem_Embed", "TasksPatchTaskRequestMainItem_Error", "TasksPatchTaskRequestMainItem_Evaluate", + "TasksPatchTaskRequestMainItem_Foreach", + "TasksPatchTaskRequestMainItem_Get", "TasksPatchTaskRequestMainItem_IfElse", + "TasksPatchTaskRequestMainItem_Log", + "TasksPatchTaskRequestMainItem_MapReduce", + "TasksPatchTaskRequestMainItem_Parallel", "TasksPatchTaskRequestMainItem_Prompt", + "TasksPatchTaskRequestMainItem_Return", + "TasksPatchTaskRequestMainItem_Search", + "TasksPatchTaskRequestMainItem_Set", + "TasksPatchTaskRequestMainItem_Sleep", + "TasksPatchTaskRequestMainItem_Switch", "TasksPatchTaskRequestMainItem_ToolCall", "TasksPatchTaskRequestMainItem_WaitForInput", "TasksPatchTaskRequestMainItem_Yield", "TasksPromptStep", "TasksPromptStepPrompt", + "TasksReturnStep", "TasksRouteListRequestDirection", "TasksRouteListRequestSortBy", "TasksRouteListResponse", + "TasksSearchStep", + "TasksSearchStepSearch", + "TasksSetKey", + "TasksSetStep", + "TasksSetStepSet", + "TasksSleepFor", + "TasksSleepStep", + "TasksSwitchStep", "TasksTask", "TasksTaskMainItem", + "TasksTaskMainItem_Embed", "TasksTaskMainItem_Error", "TasksTaskMainItem_Evaluate", + "TasksTaskMainItem_Foreach", + "TasksTaskMainItem_Get", "TasksTaskMainItem_IfElse", + "TasksTaskMainItem_Log", + 
"TasksTaskMainItem_MapReduce", + "TasksTaskMainItem_Parallel", "TasksTaskMainItem_Prompt", + "TasksTaskMainItem_Return", + "TasksTaskMainItem_Search", + "TasksTaskMainItem_Set", + "TasksTaskMainItem_Sleep", + "TasksTaskMainItem_Switch", "TasksTaskMainItem_ToolCall", "TasksTaskMainItem_WaitForInput", "TasksTaskMainItem_Yield", "TasksTaskTool", "TasksToolCallStep", "TasksUpdateTaskRequestMainItem", + "TasksUpdateTaskRequestMainItem_Embed", "TasksUpdateTaskRequestMainItem_Error", "TasksUpdateTaskRequestMainItem_Evaluate", + "TasksUpdateTaskRequestMainItem_Foreach", + "TasksUpdateTaskRequestMainItem_Get", "TasksUpdateTaskRequestMainItem_IfElse", + "TasksUpdateTaskRequestMainItem_Log", + "TasksUpdateTaskRequestMainItem_MapReduce", + "TasksUpdateTaskRequestMainItem_Parallel", "TasksUpdateTaskRequestMainItem_Prompt", + "TasksUpdateTaskRequestMainItem_Return", + "TasksUpdateTaskRequestMainItem_Search", + "TasksUpdateTaskRequestMainItem_Set", + "TasksUpdateTaskRequestMainItem_Sleep", + "TasksUpdateTaskRequestMainItem_Switch", "TasksUpdateTaskRequestMainItem_ToolCall", "TasksUpdateTaskRequestMainItem_WaitForInput", "TasksUpdateTaskRequestMainItem_Yield", diff --git a/sdks/python/julep/api/types/__init__.py b/sdks/python/julep/api/types/__init__.py index 575f35db9..a534c0a6e 100644 --- a/sdks/python/julep/api/types/__init__.py +++ b/sdks/python/julep/api/types/__init__.py @@ -161,44 +161,98 @@ ) from .task_executions_route_list_response import TaskExecutionsRouteListResponse from .tasks_base_workflow_step import TasksBaseWorkflowStep +from .tasks_case_then import TasksCaseThen +from .tasks_case_then_then import TasksCaseThenThen from .tasks_create_task_request import TasksCreateTaskRequest from .tasks_create_task_request_main_item import ( TasksCreateTaskRequestMainItem, + TasksCreateTaskRequestMainItem_Embed, TasksCreateTaskRequestMainItem_Error, TasksCreateTaskRequestMainItem_Evaluate, + TasksCreateTaskRequestMainItem_Foreach, + TasksCreateTaskRequestMainItem_Get, TasksCreateTaskRequestMainItem_IfElse, + TasksCreateTaskRequestMainItem_Log, + TasksCreateTaskRequestMainItem_MapReduce, + TasksCreateTaskRequestMainItem_Parallel, TasksCreateTaskRequestMainItem_Prompt, + TasksCreateTaskRequestMainItem_Return, + TasksCreateTaskRequestMainItem_Search, + TasksCreateTaskRequestMainItem_Set, + TasksCreateTaskRequestMainItem_Sleep, + TasksCreateTaskRequestMainItem_Switch, TasksCreateTaskRequestMainItem_ToolCall, TasksCreateTaskRequestMainItem_WaitForInput, TasksCreateTaskRequestMainItem_Yield, ) +from .tasks_embed_step import TasksEmbedStep from .tasks_error_workflow_step import TasksErrorWorkflowStep from .tasks_evaluate_step import TasksEvaluateStep +from .tasks_foreach_do import TasksForeachDo +from .tasks_foreach_do_do_item import TasksForeachDoDoItem +from .tasks_foreach_step import TasksForeachStep +from .tasks_get_step import TasksGetStep from .tasks_if_else_workflow_step import TasksIfElseWorkflowStep from .tasks_if_else_workflow_step_else import TasksIfElseWorkflowStepElse from .tasks_if_else_workflow_step_then import TasksIfElseWorkflowStepThen +from .tasks_log_step import TasksLogStep +from .tasks_map_over import TasksMapOver +from .tasks_map_reduce_step import TasksMapReduceStep +from .tasks_parallel_step import TasksParallelStep +from .tasks_parallel_step_parallel_item import TasksParallelStepParallelItem from .tasks_patch_task_request_main_item import ( TasksPatchTaskRequestMainItem, + TasksPatchTaskRequestMainItem_Embed, TasksPatchTaskRequestMainItem_Error, 
TasksPatchTaskRequestMainItem_Evaluate, + TasksPatchTaskRequestMainItem_Foreach, + TasksPatchTaskRequestMainItem_Get, TasksPatchTaskRequestMainItem_IfElse, + TasksPatchTaskRequestMainItem_Log, + TasksPatchTaskRequestMainItem_MapReduce, + TasksPatchTaskRequestMainItem_Parallel, TasksPatchTaskRequestMainItem_Prompt, + TasksPatchTaskRequestMainItem_Return, + TasksPatchTaskRequestMainItem_Search, + TasksPatchTaskRequestMainItem_Set, + TasksPatchTaskRequestMainItem_Sleep, + TasksPatchTaskRequestMainItem_Switch, TasksPatchTaskRequestMainItem_ToolCall, TasksPatchTaskRequestMainItem_WaitForInput, TasksPatchTaskRequestMainItem_Yield, ) from .tasks_prompt_step import TasksPromptStep from .tasks_prompt_step_prompt import TasksPromptStepPrompt +from .tasks_return_step import TasksReturnStep from .tasks_route_list_request_direction import TasksRouteListRequestDirection from .tasks_route_list_request_sort_by import TasksRouteListRequestSortBy from .tasks_route_list_response import TasksRouteListResponse +from .tasks_search_step import TasksSearchStep +from .tasks_search_step_search import TasksSearchStepSearch +from .tasks_set_key import TasksSetKey +from .tasks_set_step import TasksSetStep +from .tasks_set_step_set import TasksSetStepSet +from .tasks_sleep_for import TasksSleepFor +from .tasks_sleep_step import TasksSleepStep +from .tasks_switch_step import TasksSwitchStep from .tasks_task import TasksTask from .tasks_task_main_item import ( TasksTaskMainItem, + TasksTaskMainItem_Embed, TasksTaskMainItem_Error, TasksTaskMainItem_Evaluate, + TasksTaskMainItem_Foreach, + TasksTaskMainItem_Get, TasksTaskMainItem_IfElse, + TasksTaskMainItem_Log, + TasksTaskMainItem_MapReduce, + TasksTaskMainItem_Parallel, TasksTaskMainItem_Prompt, + TasksTaskMainItem_Return, + TasksTaskMainItem_Search, + TasksTaskMainItem_Set, + TasksTaskMainItem_Sleep, + TasksTaskMainItem_Switch, TasksTaskMainItem_ToolCall, TasksTaskMainItem_WaitForInput, TasksTaskMainItem_Yield, @@ -207,10 +261,21 @@ from .tasks_tool_call_step import TasksToolCallStep from .tasks_update_task_request_main_item import ( TasksUpdateTaskRequestMainItem, + TasksUpdateTaskRequestMainItem_Embed, TasksUpdateTaskRequestMainItem_Error, TasksUpdateTaskRequestMainItem_Evaluate, + TasksUpdateTaskRequestMainItem_Foreach, + TasksUpdateTaskRequestMainItem_Get, TasksUpdateTaskRequestMainItem_IfElse, + TasksUpdateTaskRequestMainItem_Log, + TasksUpdateTaskRequestMainItem_MapReduce, + TasksUpdateTaskRequestMainItem_Parallel, TasksUpdateTaskRequestMainItem_Prompt, + TasksUpdateTaskRequestMainItem_Return, + TasksUpdateTaskRequestMainItem_Search, + TasksUpdateTaskRequestMainItem_Set, + TasksUpdateTaskRequestMainItem_Sleep, + TasksUpdateTaskRequestMainItem_Switch, TasksUpdateTaskRequestMainItem_ToolCall, TasksUpdateTaskRequestMainItem_WaitForInput, TasksUpdateTaskRequestMainItem_Yield, @@ -368,49 +433,114 @@ "TaskExecutionsRouteListRequestSortBy", "TaskExecutionsRouteListResponse", "TasksBaseWorkflowStep", + "TasksCaseThen", + "TasksCaseThenThen", "TasksCreateTaskRequest", "TasksCreateTaskRequestMainItem", + "TasksCreateTaskRequestMainItem_Embed", "TasksCreateTaskRequestMainItem_Error", "TasksCreateTaskRequestMainItem_Evaluate", + "TasksCreateTaskRequestMainItem_Foreach", + "TasksCreateTaskRequestMainItem_Get", "TasksCreateTaskRequestMainItem_IfElse", + "TasksCreateTaskRequestMainItem_Log", + "TasksCreateTaskRequestMainItem_MapReduce", + "TasksCreateTaskRequestMainItem_Parallel", "TasksCreateTaskRequestMainItem_Prompt", + "TasksCreateTaskRequestMainItem_Return", + 
"TasksCreateTaskRequestMainItem_Search", + "TasksCreateTaskRequestMainItem_Set", + "TasksCreateTaskRequestMainItem_Sleep", + "TasksCreateTaskRequestMainItem_Switch", "TasksCreateTaskRequestMainItem_ToolCall", "TasksCreateTaskRequestMainItem_WaitForInput", "TasksCreateTaskRequestMainItem_Yield", + "TasksEmbedStep", "TasksErrorWorkflowStep", "TasksEvaluateStep", + "TasksForeachDo", + "TasksForeachDoDoItem", + "TasksForeachStep", + "TasksGetStep", "TasksIfElseWorkflowStep", "TasksIfElseWorkflowStepElse", "TasksIfElseWorkflowStepThen", + "TasksLogStep", + "TasksMapOver", + "TasksMapReduceStep", + "TasksParallelStep", + "TasksParallelStepParallelItem", "TasksPatchTaskRequestMainItem", + "TasksPatchTaskRequestMainItem_Embed", "TasksPatchTaskRequestMainItem_Error", "TasksPatchTaskRequestMainItem_Evaluate", + "TasksPatchTaskRequestMainItem_Foreach", + "TasksPatchTaskRequestMainItem_Get", "TasksPatchTaskRequestMainItem_IfElse", + "TasksPatchTaskRequestMainItem_Log", + "TasksPatchTaskRequestMainItem_MapReduce", + "TasksPatchTaskRequestMainItem_Parallel", "TasksPatchTaskRequestMainItem_Prompt", + "TasksPatchTaskRequestMainItem_Return", + "TasksPatchTaskRequestMainItem_Search", + "TasksPatchTaskRequestMainItem_Set", + "TasksPatchTaskRequestMainItem_Sleep", + "TasksPatchTaskRequestMainItem_Switch", "TasksPatchTaskRequestMainItem_ToolCall", "TasksPatchTaskRequestMainItem_WaitForInput", "TasksPatchTaskRequestMainItem_Yield", "TasksPromptStep", "TasksPromptStepPrompt", + "TasksReturnStep", "TasksRouteListRequestDirection", "TasksRouteListRequestSortBy", "TasksRouteListResponse", + "TasksSearchStep", + "TasksSearchStepSearch", + "TasksSetKey", + "TasksSetStep", + "TasksSetStepSet", + "TasksSleepFor", + "TasksSleepStep", + "TasksSwitchStep", "TasksTask", "TasksTaskMainItem", + "TasksTaskMainItem_Embed", "TasksTaskMainItem_Error", "TasksTaskMainItem_Evaluate", + "TasksTaskMainItem_Foreach", + "TasksTaskMainItem_Get", "TasksTaskMainItem_IfElse", + "TasksTaskMainItem_Log", + "TasksTaskMainItem_MapReduce", + "TasksTaskMainItem_Parallel", "TasksTaskMainItem_Prompt", + "TasksTaskMainItem_Return", + "TasksTaskMainItem_Search", + "TasksTaskMainItem_Set", + "TasksTaskMainItem_Sleep", + "TasksTaskMainItem_Switch", "TasksTaskMainItem_ToolCall", "TasksTaskMainItem_WaitForInput", "TasksTaskMainItem_Yield", "TasksTaskTool", "TasksToolCallStep", "TasksUpdateTaskRequestMainItem", + "TasksUpdateTaskRequestMainItem_Embed", "TasksUpdateTaskRequestMainItem_Error", "TasksUpdateTaskRequestMainItem_Evaluate", + "TasksUpdateTaskRequestMainItem_Foreach", + "TasksUpdateTaskRequestMainItem_Get", "TasksUpdateTaskRequestMainItem_IfElse", + "TasksUpdateTaskRequestMainItem_Log", + "TasksUpdateTaskRequestMainItem_MapReduce", + "TasksUpdateTaskRequestMainItem_Parallel", "TasksUpdateTaskRequestMainItem_Prompt", + "TasksUpdateTaskRequestMainItem_Return", + "TasksUpdateTaskRequestMainItem_Search", + "TasksUpdateTaskRequestMainItem_Set", + "TasksUpdateTaskRequestMainItem_Sleep", + "TasksUpdateTaskRequestMainItem_Switch", "TasksUpdateTaskRequestMainItem_ToolCall", "TasksUpdateTaskRequestMainItem_WaitForInput", "TasksUpdateTaskRequestMainItem_Yield", diff --git a/sdks/python/julep/api/types/tasks_case_then.py b/sdks/python/julep/api/types/tasks_case_then.py new file mode 100644 index 000000000..1ab5ac484 --- /dev/null +++ b/sdks/python/julep/api/types/tasks_case_then.py @@ -0,0 +1,52 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import datetime as dt +import typing + +from ..core.datetime_utils import serialize_datetime +from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +from .common_py_expression import CommonPyExpression +from .tasks_case_then_then import TasksCaseThenThen + + +class TasksCaseThen(pydantic_v1.BaseModel): + case: CommonPyExpression = pydantic_v1.Field() + """ + The condition to evaluate + """ + + then: TasksCaseThenThen = pydantic_v1.Field() + """ + The steps to run if the condition is true + """ + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} diff --git a/sdks/python/julep/api/types/tasks_case_then_then.py b/sdks/python/julep/api/types/tasks_case_then_then.py new file mode 100644 index 000000000..3680b8720 --- /dev/null +++ b/sdks/python/julep/api/types/tasks_case_then_then.py @@ -0,0 +1,32 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +from .tasks_embed_step import TasksEmbedStep +from .tasks_error_workflow_step import TasksErrorWorkflowStep +from .tasks_get_step import TasksGetStep +from .tasks_log_step import TasksLogStep +from .tasks_prompt_step import TasksPromptStep +from .tasks_return_step import TasksReturnStep +from .tasks_search_step import TasksSearchStep +from .tasks_set_step import TasksSetStep +from .tasks_sleep_step import TasksSleepStep +from .tasks_tool_call_step import TasksToolCallStep +from .tasks_wait_for_input_step import TasksWaitForInputStep +from .tasks_yield_step import TasksYieldStep + +TasksCaseThenThen = typing.Union[ + typing.Any, + TasksToolCallStep, + TasksYieldStep, + TasksPromptStep, + TasksErrorWorkflowStep, + TasksSleepStep, + TasksReturnStep, + TasksGetStep, + TasksSetStep, + TasksLogStep, + TasksEmbedStep, + TasksSearchStep, + TasksWaitForInputStep, +] diff --git a/sdks/python/julep/api/types/tasks_create_task_request_main_item.py b/sdks/python/julep/api/types/tasks_create_task_request_main_item.py index ae9916c31..0cfa46d96 100644 --- a/sdks/python/julep/api/types/tasks_create_task_request_main_item.py +++ b/sdks/python/julep/api/types/tasks_create_task_request_main_item.py @@ -10,9 +10,17 @@ from .chat_chat_settings import ChatChatSettings from .common_py_expression import CommonPyExpression from .common_tool_ref import CommonToolRef +from .docs_embed_query_request import DocsEmbedQueryRequest +from .tasks_case_then import TasksCaseThen +from .tasks_foreach_do import TasksForeachDo from .tasks_if_else_workflow_step_else import TasksIfElseWorkflowStepElse from .tasks_if_else_workflow_step_then import TasksIfElseWorkflowStepThen +from .tasks_map_over import TasksMapOver +from .tasks_parallel_step_parallel_item import TasksParallelStepParallelItem from .tasks_prompt_step_prompt import TasksPromptStepPrompt +from .tasks_search_step_search import TasksSearchStepSearch 
+from .tasks_set_step_set import TasksSetStepSet +from .tasks_sleep_for import TasksSleepFor from .tasks_wait_for_input_step_info import TasksWaitForInputStepInfo @@ -213,6 +221,272 @@ class Config: json_encoders = {dt.datetime: serialize_datetime} +class TasksCreateTaskRequestMainItem_Sleep(pydantic_v1.BaseModel): + sleep: TasksSleepFor + kind: typing.Literal["sleep"] = pydantic_v1.Field(alias="kind_", default="sleep") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksCreateTaskRequestMainItem_Return(pydantic_v1.BaseModel): + return_: typing.Dict[str, CommonPyExpression] = pydantic_v1.Field(alias="return") + kind: typing.Literal["return"] = pydantic_v1.Field(alias="kind_", default="return") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksCreateTaskRequestMainItem_Get(pydantic_v1.BaseModel): + get: str + kind: typing.Literal["get"] = pydantic_v1.Field(alias="kind_", default="get") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksCreateTaskRequestMainItem_Set(pydantic_v1.BaseModel): + set_: TasksSetStepSet = pydantic_v1.Field(alias="set") + kind: typing.Literal["set"] = 
pydantic_v1.Field(alias="kind_", default="set") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksCreateTaskRequestMainItem_Log(pydantic_v1.BaseModel): + log: CommonPyExpression + kind: typing.Literal["log"] = pydantic_v1.Field(alias="kind_", default="log") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksCreateTaskRequestMainItem_Embed(pydantic_v1.BaseModel): + embed: DocsEmbedQueryRequest + kind: typing.Literal["embed"] = pydantic_v1.Field(alias="kind_", default="embed") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksCreateTaskRequestMainItem_Search(pydantic_v1.BaseModel): + search: TasksSearchStepSearch + kind: typing.Literal["search"] = pydantic_v1.Field(alias="kind_", default="search") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + 
kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + class TasksCreateTaskRequestMainItem_WaitForInput(pydantic_v1.BaseModel): info: TasksWaitForInputStepInfo kind: typing.Literal["wait_for_input"] = pydantic_v1.Field( @@ -295,12 +569,182 @@ class Config: json_encoders = {dt.datetime: serialize_datetime} +class TasksCreateTaskRequestMainItem_Switch(pydantic_v1.BaseModel): + switch: typing.List[TasksCaseThen] + kind: typing.Literal["switch"] = pydantic_v1.Field(alias="kind_", default="switch") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksCreateTaskRequestMainItem_Foreach(pydantic_v1.BaseModel): + foreach: TasksForeachDo + kind: typing.Literal["foreach"] = pydantic_v1.Field( + alias="kind_", default="foreach" + ) + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksCreateTaskRequestMainItem_Parallel(pydantic_v1.BaseModel): + parallel: typing.List[TasksParallelStepParallelItem] + kind: typing.Literal["parallel"] = pydantic_v1.Field( + alias="kind_", default="parallel" + ) + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return 
deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksCreateTaskRequestMainItem_MapReduce(pydantic_v1.BaseModel): + map_: TasksMapOver = pydantic_v1.Field(alias="map") + reduce: CommonPyExpression + kind: typing.Literal["map_reduce"] = pydantic_v1.Field( + alias="kind_", default="map_reduce" + ) + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + TasksCreateTaskRequestMainItem = typing.Union[ TasksCreateTaskRequestMainItem_Evaluate, TasksCreateTaskRequestMainItem_ToolCall, TasksCreateTaskRequestMainItem_Yield, TasksCreateTaskRequestMainItem_Prompt, TasksCreateTaskRequestMainItem_Error, + TasksCreateTaskRequestMainItem_Sleep, + TasksCreateTaskRequestMainItem_Return, + TasksCreateTaskRequestMainItem_Get, + TasksCreateTaskRequestMainItem_Set, + TasksCreateTaskRequestMainItem_Log, + TasksCreateTaskRequestMainItem_Embed, + TasksCreateTaskRequestMainItem_Search, TasksCreateTaskRequestMainItem_WaitForInput, TasksCreateTaskRequestMainItem_IfElse, + TasksCreateTaskRequestMainItem_Switch, + TasksCreateTaskRequestMainItem_Foreach, + TasksCreateTaskRequestMainItem_Parallel, + TasksCreateTaskRequestMainItem_MapReduce, ] diff --git a/sdks/python/julep/api/types/tasks_embed_step.py b/sdks/python/julep/api/types/tasks_embed_step.py new file mode 100644 index 000000000..13e9bcfd8 --- /dev/null +++ b/sdks/python/julep/api/types/tasks_embed_step.py @@ -0,0 +1,49 @@ +# This file was auto-generated by Fern from our API Definition. 
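A minimal usage sketch of the expanded TasksCreateTaskRequestMainItem union above, assuming CommonPyExpression resolves to a plain string (as its use throughout these models suggests); the expressions are placeholders, not part of the diff:

from julep.api.types.tasks_create_task_request_main_item import (
    TasksCreateTaskRequestMainItem_Log,
    TasksCreateTaskRequestMainItem_Sleep,
)
from julep.api.types.tasks_sleep_for import TasksSleepFor

# kind_ carries a per-variant default, so only the payload field is needed.
log_step = TasksCreateTaskRequestMainItem_Log(log="'processed ' + str(len(_))")
sleep_step = TasksCreateTaskRequestMainItem_Sleep(
    sleep=TasksSleepFor(seconds=30, minutes=0, hours=0, days=0)  # all four fields are required
)
# main=[log_step, sleep_step, ...] would then go into the create-task payload.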
+ +import datetime as dt +import typing + +from ..core.datetime_utils import serialize_datetime +from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +from .docs_embed_query_request import DocsEmbedQueryRequest +from .tasks_base_workflow_step import TasksBaseWorkflowStep + + +class TasksEmbedStep(TasksBaseWorkflowStep): + embed: DocsEmbedQueryRequest = pydantic_v1.Field() + """ + The text to embed + """ + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} diff --git a/sdks/python/julep/api/types/tasks_foreach_do.py b/sdks/python/julep/api/types/tasks_foreach_do.py new file mode 100644 index 000000000..8ee662103 --- /dev/null +++ b/sdks/python/julep/api/types/tasks_foreach_do.py @@ -0,0 +1,54 @@ +# This file was auto-generated by Fern from our API Definition. + +import datetime as dt +import typing + +from ..core.datetime_utils import serialize_datetime +from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +from .common_py_expression import CommonPyExpression +from .tasks_foreach_do_do_item import TasksForeachDoDoItem + + +class TasksForeachDo(pydantic_v1.BaseModel): + in_: CommonPyExpression = pydantic_v1.Field(alias="in") + """ + The variable to iterate over + """ + + do: typing.List[TasksForeachDoDoItem] = pydantic_v1.Field() + """ + The steps to run for each iteration + """ + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} diff --git a/sdks/python/julep/api/types/tasks_foreach_do_do_item.py b/sdks/python/julep/api/types/tasks_foreach_do_do_item.py new file mode 100644 index 000000000..f8c2a90bf --- /dev/null +++ b/sdks/python/julep/api/types/tasks_foreach_do_do_item.py @@ -0,0 +1,32 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import typing + +from .tasks_embed_step import TasksEmbedStep +from .tasks_error_workflow_step import TasksErrorWorkflowStep +from .tasks_get_step import TasksGetStep +from .tasks_log_step import TasksLogStep +from .tasks_prompt_step import TasksPromptStep +from .tasks_return_step import TasksReturnStep +from .tasks_search_step import TasksSearchStep +from .tasks_set_step import TasksSetStep +from .tasks_sleep_step import TasksSleepStep +from .tasks_tool_call_step import TasksToolCallStep +from .tasks_wait_for_input_step import TasksWaitForInputStep +from .tasks_yield_step import TasksYieldStep + +TasksForeachDoDoItem = typing.Union[ + typing.Any, + TasksToolCallStep, + TasksYieldStep, + TasksPromptStep, + TasksErrorWorkflowStep, + TasksSleepStep, + TasksReturnStep, + TasksGetStep, + TasksSetStep, + TasksLogStep, + TasksEmbedStep, + TasksSearchStep, + TasksWaitForInputStep, +] diff --git a/sdks/python/julep/api/types/tasks_foreach_step.py b/sdks/python/julep/api/types/tasks_foreach_step.py new file mode 100644 index 000000000..03c86dce2 --- /dev/null +++ b/sdks/python/julep/api/types/tasks_foreach_step.py @@ -0,0 +1,49 @@ +# This file was auto-generated by Fern from our API Definition. + +import datetime as dt +import typing + +from ..core.datetime_utils import serialize_datetime +from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +from .tasks_base_workflow_step import TasksBaseWorkflowStep +from .tasks_foreach_do import TasksForeachDo + + +class TasksForeachStep(TasksBaseWorkflowStep): + foreach: TasksForeachDo = pydantic_v1.Field() + """ + The steps to run for each iteration + """ + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} diff --git a/sdks/python/julep/api/types/tasks_get_step.py b/sdks/python/julep/api/types/tasks_get_step.py new file mode 100644 index 000000000..c6fa9748e --- /dev/null +++ b/sdks/python/julep/api/types/tasks_get_step.py @@ -0,0 +1,48 @@ +# This file was auto-generated by Fern from our API Definition. 
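TasksForeachDo above pairs an iterable expression, serialized under the "in" alias, with a list of TasksForeachDoDoItem sub-steps. A rough sketch using the create-request foreach variant; the iterable expression is a placeholder, and the sub-step is passed as a plain dict, which the do-item union accepts through typing.Any:

from julep.api.types.tasks_create_task_request_main_item import (
    TasksCreateTaskRequestMainItem_Foreach,
)
from julep.api.types.tasks_foreach_do import TasksForeachDo

foreach_step = TasksCreateTaskRequestMainItem_Foreach(
    foreach=TasksForeachDo(
        in_="inputs['documents']",          # populated by field name, serialized as "in"
        do=[{"kind_": "log", "log": "_"}],  # plain dict; TasksForeachDoDoItem includes typing.Any
    )
)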
+ +import datetime as dt +import typing + +from ..core.datetime_utils import serialize_datetime +from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +from .tasks_base_workflow_step import TasksBaseWorkflowStep + + +class TasksGetStep(TasksBaseWorkflowStep): + get: str = pydantic_v1.Field() + """ + The key to get + """ + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} diff --git a/sdks/python/julep/api/types/tasks_if_else_workflow_step_else.py b/sdks/python/julep/api/types/tasks_if_else_workflow_step_else.py index 37d64c117..754449134 100644 --- a/sdks/python/julep/api/types/tasks_if_else_workflow_step_else.py +++ b/sdks/python/julep/api/types/tasks_if_else_workflow_step_else.py @@ -2,8 +2,15 @@ import typing +from .tasks_embed_step import TasksEmbedStep from .tasks_error_workflow_step import TasksErrorWorkflowStep +from .tasks_get_step import TasksGetStep +from .tasks_log_step import TasksLogStep from .tasks_prompt_step import TasksPromptStep +from .tasks_return_step import TasksReturnStep +from .tasks_search_step import TasksSearchStep +from .tasks_set_step import TasksSetStep +from .tasks_sleep_step import TasksSleepStep from .tasks_tool_call_step import TasksToolCallStep from .tasks_wait_for_input_step import TasksWaitForInputStep from .tasks_yield_step import TasksYieldStep @@ -14,5 +21,12 @@ TasksYieldStep, TasksPromptStep, TasksErrorWorkflowStep, + TasksSleepStep, + TasksReturnStep, + TasksGetStep, + TasksSetStep, + TasksLogStep, + TasksEmbedStep, + TasksSearchStep, TasksWaitForInputStep, ] diff --git a/sdks/python/julep/api/types/tasks_if_else_workflow_step_then.py b/sdks/python/julep/api/types/tasks_if_else_workflow_step_then.py index a706d4c86..9e914c54d 100644 --- a/sdks/python/julep/api/types/tasks_if_else_workflow_step_then.py +++ b/sdks/python/julep/api/types/tasks_if_else_workflow_step_then.py @@ -2,8 +2,15 @@ import typing +from .tasks_embed_step import TasksEmbedStep from .tasks_error_workflow_step import TasksErrorWorkflowStep +from .tasks_get_step import TasksGetStep +from .tasks_log_step import TasksLogStep from .tasks_prompt_step import TasksPromptStep +from .tasks_return_step import TasksReturnStep +from .tasks_search_step import TasksSearchStep +from .tasks_set_step import TasksSetStep +from .tasks_sleep_step import TasksSleepStep from .tasks_tool_call_step import TasksToolCallStep from .tasks_wait_for_input_step import TasksWaitForInputStep from .tasks_yield_step import TasksYieldStep @@ -14,5 +21,12 @@ TasksYieldStep, TasksPromptStep, TasksErrorWorkflowStep, + TasksSleepStep, + TasksReturnStep, + TasksGetStep, + TasksSetStep, + TasksLogStep, + TasksEmbedStep, + TasksSearchStep, TasksWaitForInputStep, ] diff --git 
a/sdks/python/julep/api/types/tasks_log_step.py b/sdks/python/julep/api/types/tasks_log_step.py new file mode 100644 index 000000000..649a8471d --- /dev/null +++ b/sdks/python/julep/api/types/tasks_log_step.py @@ -0,0 +1,49 @@ +# This file was auto-generated by Fern from our API Definition. + +import datetime as dt +import typing + +from ..core.datetime_utils import serialize_datetime +from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +from .common_py_expression import CommonPyExpression +from .tasks_base_workflow_step import TasksBaseWorkflowStep + + +class TasksLogStep(TasksBaseWorkflowStep): + log: CommonPyExpression = pydantic_v1.Field() + """ + The value to log + """ + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} diff --git a/sdks/python/julep/api/types/tasks_map_over.py b/sdks/python/julep/api/types/tasks_map_over.py new file mode 100644 index 000000000..d3377ff00 --- /dev/null +++ b/sdks/python/julep/api/types/tasks_map_over.py @@ -0,0 +1,51 @@ +# This file was auto-generated by Fern from our API Definition. + +import datetime as dt +import typing + +from ..core.datetime_utils import serialize_datetime +from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +from .common_py_expression import CommonPyExpression + + +class TasksMapOver(pydantic_v1.BaseModel): + over: CommonPyExpression = pydantic_v1.Field() + """ + The variable to iterate over + """ + + workflow: str = pydantic_v1.Field() + """ + The subworkflow to run for each iteration + """ + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} diff --git a/sdks/python/julep/api/types/tasks_map_reduce_step.py b/sdks/python/julep/api/types/tasks_map_reduce_step.py new file mode 100644 index 000000000..9d43304d5 --- /dev/null +++ b/sdks/python/julep/api/types/tasks_map_reduce_step.py @@ -0,0 +1,55 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import datetime as dt +import typing + +from ..core.datetime_utils import serialize_datetime +from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +from .common_py_expression import CommonPyExpression +from .tasks_base_workflow_step import TasksBaseWorkflowStep +from .tasks_map_over import TasksMapOver + + +class TasksMapReduceStep(TasksBaseWorkflowStep): + map_: TasksMapOver = pydantic_v1.Field(alias="map") + """ + The steps to run for each iteration + """ + + reduce: CommonPyExpression = pydantic_v1.Field() + """ + The expression to reduce the results (`_` is a list of outputs) + """ + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} diff --git a/sdks/python/julep/api/types/tasks_parallel_step.py b/sdks/python/julep/api/types/tasks_parallel_step.py new file mode 100644 index 000000000..7f3ba3bad --- /dev/null +++ b/sdks/python/julep/api/types/tasks_parallel_step.py @@ -0,0 +1,49 @@ +# This file was auto-generated by Fern from our API Definition. + +import datetime as dt +import typing + +from ..core.datetime_utils import serialize_datetime +from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +from .tasks_base_workflow_step import TasksBaseWorkflowStep +from .tasks_parallel_step_parallel_item import TasksParallelStepParallelItem + + +class TasksParallelStep(TasksBaseWorkflowStep): + parallel: typing.List[TasksParallelStepParallelItem] = pydantic_v1.Field() + """ + The steps to run in parallel. Max concurrency will depend on the platform + """ + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} diff --git a/sdks/python/julep/api/types/tasks_parallel_step_parallel_item.py b/sdks/python/julep/api/types/tasks_parallel_step_parallel_item.py new file mode 100644 index 000000000..0b881fed7 --- /dev/null +++ b/sdks/python/julep/api/types/tasks_parallel_step_parallel_item.py @@ -0,0 +1,32 @@ +# This file was auto-generated by Fern from our API Definition. 
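TasksMapReduceStep above combines a TasksMapOver, serialized under the "map" alias, with a reduce expression in which `_` is the list of per-iteration outputs. A sketch of the matching create-request variant; the subworkflow name and expressions are placeholders:

from julep.api.types.tasks_create_task_request_main_item import (
    TasksCreateTaskRequestMainItem_MapReduce,
)
from julep.api.types.tasks_map_over import TasksMapOver

map_reduce_step = TasksCreateTaskRequestMainItem_MapReduce(
    map_=TasksMapOver(over="inputs['documents']", workflow="summarize_doc"),  # hypothetical subworkflow name
    reduce="[output for output in _]",  # `_` is the list of per-iteration outputs
)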
+ +import typing + +from .tasks_embed_step import TasksEmbedStep +from .tasks_error_workflow_step import TasksErrorWorkflowStep +from .tasks_get_step import TasksGetStep +from .tasks_log_step import TasksLogStep +from .tasks_prompt_step import TasksPromptStep +from .tasks_return_step import TasksReturnStep +from .tasks_search_step import TasksSearchStep +from .tasks_set_step import TasksSetStep +from .tasks_sleep_step import TasksSleepStep +from .tasks_tool_call_step import TasksToolCallStep +from .tasks_wait_for_input_step import TasksWaitForInputStep +from .tasks_yield_step import TasksYieldStep + +TasksParallelStepParallelItem = typing.Union[ + typing.Any, + TasksToolCallStep, + TasksYieldStep, + TasksPromptStep, + TasksErrorWorkflowStep, + TasksSleepStep, + TasksReturnStep, + TasksGetStep, + TasksSetStep, + TasksLogStep, + TasksEmbedStep, + TasksSearchStep, + TasksWaitForInputStep, +] diff --git a/sdks/python/julep/api/types/tasks_patch_task_request_main_item.py b/sdks/python/julep/api/types/tasks_patch_task_request_main_item.py index 6532321d0..83f4ae007 100644 --- a/sdks/python/julep/api/types/tasks_patch_task_request_main_item.py +++ b/sdks/python/julep/api/types/tasks_patch_task_request_main_item.py @@ -10,9 +10,17 @@ from .chat_chat_settings import ChatChatSettings from .common_py_expression import CommonPyExpression from .common_tool_ref import CommonToolRef +from .docs_embed_query_request import DocsEmbedQueryRequest +from .tasks_case_then import TasksCaseThen +from .tasks_foreach_do import TasksForeachDo from .tasks_if_else_workflow_step_else import TasksIfElseWorkflowStepElse from .tasks_if_else_workflow_step_then import TasksIfElseWorkflowStepThen +from .tasks_map_over import TasksMapOver +from .tasks_parallel_step_parallel_item import TasksParallelStepParallelItem from .tasks_prompt_step_prompt import TasksPromptStepPrompt +from .tasks_search_step_search import TasksSearchStepSearch +from .tasks_set_step_set import TasksSetStepSet +from .tasks_sleep_for import TasksSleepFor from .tasks_wait_for_input_step_info import TasksWaitForInputStepInfo @@ -213,6 +221,272 @@ class Config: json_encoders = {dt.datetime: serialize_datetime} +class TasksPatchTaskRequestMainItem_Sleep(pydantic_v1.BaseModel): + sleep: TasksSleepFor + kind: typing.Literal["sleep"] = pydantic_v1.Field(alias="kind_", default="sleep") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksPatchTaskRequestMainItem_Return(pydantic_v1.BaseModel): + return_: typing.Dict[str, CommonPyExpression] = pydantic_v1.Field(alias="return") + kind: typing.Literal["return"] = pydantic_v1.Field(alias="kind_", default="return") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, 
+ "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksPatchTaskRequestMainItem_Get(pydantic_v1.BaseModel): + get: str + kind: typing.Literal["get"] = pydantic_v1.Field(alias="kind_", default="get") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksPatchTaskRequestMainItem_Set(pydantic_v1.BaseModel): + set_: TasksSetStepSet = pydantic_v1.Field(alias="set") + kind: typing.Literal["set"] = pydantic_v1.Field(alias="kind_", default="set") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksPatchTaskRequestMainItem_Log(pydantic_v1.BaseModel): + log: CommonPyExpression + kind: typing.Literal["log"] = pydantic_v1.Field(alias="kind_", default="log") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + 
super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksPatchTaskRequestMainItem_Embed(pydantic_v1.BaseModel): + embed: DocsEmbedQueryRequest + kind: typing.Literal["embed"] = pydantic_v1.Field(alias="kind_", default="embed") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksPatchTaskRequestMainItem_Search(pydantic_v1.BaseModel): + search: TasksSearchStepSearch + kind: typing.Literal["search"] = pydantic_v1.Field(alias="kind_", default="search") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + class TasksPatchTaskRequestMainItem_WaitForInput(pydantic_v1.BaseModel): info: TasksWaitForInputStepInfo kind: typing.Literal["wait_for_input"] = pydantic_v1.Field( @@ -295,12 +569,182 @@ class Config: json_encoders = {dt.datetime: serialize_datetime} +class TasksPatchTaskRequestMainItem_Switch(pydantic_v1.BaseModel): + switch: typing.List[TasksCaseThen] + kind: typing.Literal["switch"] = pydantic_v1.Field(alias="kind_", default="switch") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + 
allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksPatchTaskRequestMainItem_Foreach(pydantic_v1.BaseModel): + foreach: TasksForeachDo + kind: typing.Literal["foreach"] = pydantic_v1.Field( + alias="kind_", default="foreach" + ) + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksPatchTaskRequestMainItem_Parallel(pydantic_v1.BaseModel): + parallel: typing.List[TasksParallelStepParallelItem] + kind: typing.Literal["parallel"] = pydantic_v1.Field( + alias="kind_", default="parallel" + ) + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksPatchTaskRequestMainItem_MapReduce(pydantic_v1.BaseModel): + map_: TasksMapOver = pydantic_v1.Field(alias="map") + reduce: CommonPyExpression + kind: typing.Literal["map_reduce"] = pydantic_v1.Field( + alias="kind_", default="map_reduce" + ) + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + TasksPatchTaskRequestMainItem = typing.Union[ TasksPatchTaskRequestMainItem_Evaluate, TasksPatchTaskRequestMainItem_ToolCall, TasksPatchTaskRequestMainItem_Yield, 
TasksPatchTaskRequestMainItem_Prompt, TasksPatchTaskRequestMainItem_Error, + TasksPatchTaskRequestMainItem_Sleep, + TasksPatchTaskRequestMainItem_Return, + TasksPatchTaskRequestMainItem_Get, + TasksPatchTaskRequestMainItem_Set, + TasksPatchTaskRequestMainItem_Log, + TasksPatchTaskRequestMainItem_Embed, + TasksPatchTaskRequestMainItem_Search, TasksPatchTaskRequestMainItem_WaitForInput, TasksPatchTaskRequestMainItem_IfElse, + TasksPatchTaskRequestMainItem_Switch, + TasksPatchTaskRequestMainItem_Foreach, + TasksPatchTaskRequestMainItem_Parallel, + TasksPatchTaskRequestMainItem_MapReduce, ] diff --git a/sdks/python/julep/api/types/tasks_return_step.py b/sdks/python/julep/api/types/tasks_return_step.py new file mode 100644 index 000000000..33aed561e --- /dev/null +++ b/sdks/python/julep/api/types/tasks_return_step.py @@ -0,0 +1,49 @@ +# This file was auto-generated by Fern from our API Definition. + +import datetime as dt +import typing + +from ..core.datetime_utils import serialize_datetime +from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +from .common_py_expression import CommonPyExpression +from .tasks_base_workflow_step import TasksBaseWorkflowStep + + +class TasksReturnStep(TasksBaseWorkflowStep): + return_: typing.Dict[str, CommonPyExpression] = pydantic_v1.Field(alias="return") + """ + The value to return + """ + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} diff --git a/sdks/python/julep/api/types/tasks_search_step.py b/sdks/python/julep/api/types/tasks_search_step.py new file mode 100644 index 000000000..82d0a1658 --- /dev/null +++ b/sdks/python/julep/api/types/tasks_search_step.py @@ -0,0 +1,49 @@ +# This file was auto-generated by Fern from our API Definition. 
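TasksReturnStep above maps names to expressions for the returned value; the field is exposed as `return_` because `return` is a Python keyword, and it serializes under the "return" alias. A sketch with placeholder expressions, using the create-request variant:

from julep.api.types.tasks_create_task_request_main_item import (
    TasksCreateTaskRequestMainItem_Return,
)

return_step = TasksCreateTaskRequestMainItem_Return(
    return_={"result": "_", "status": "'done'"}  # serialized as {"return": {...}}
)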
+ +import datetime as dt +import typing + +from ..core.datetime_utils import serialize_datetime +from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +from .tasks_base_workflow_step import TasksBaseWorkflowStep +from .tasks_search_step_search import TasksSearchStepSearch + + +class TasksSearchStep(TasksBaseWorkflowStep): + search: TasksSearchStepSearch = pydantic_v1.Field() + """ + The search query + """ + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} diff --git a/sdks/python/julep/api/types/tasks_search_step_search.py b/sdks/python/julep/api/types/tasks_search_step_search.py new file mode 100644 index 000000000..678e79d5f --- /dev/null +++ b/sdks/python/julep/api/types/tasks_search_step_search.py @@ -0,0 +1,11 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +from .docs_hybrid_doc_search_request import DocsHybridDocSearchRequest +from .docs_text_only_doc_search_request import DocsTextOnlyDocSearchRequest +from .docs_vector_doc_search_request import DocsVectorDocSearchRequest + +TasksSearchStepSearch = typing.Union[ + DocsVectorDocSearchRequest, DocsTextOnlyDocSearchRequest, DocsHybridDocSearchRequest +] diff --git a/sdks/python/julep/api/types/tasks_set_key.py b/sdks/python/julep/api/types/tasks_set_key.py new file mode 100644 index 000000000..c925599a2 --- /dev/null +++ b/sdks/python/julep/api/types/tasks_set_key.py @@ -0,0 +1,51 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import datetime as dt +import typing + +from ..core.datetime_utils import serialize_datetime +from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +from .common_py_expression import CommonPyExpression + + +class TasksSetKey(pydantic_v1.BaseModel): + key: str = pydantic_v1.Field() + """ + The key to set + """ + + value: CommonPyExpression = pydantic_v1.Field() + """ + The value to set + """ + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} diff --git a/sdks/python/julep/api/types/tasks_set_step.py b/sdks/python/julep/api/types/tasks_set_step.py new file mode 100644 index 000000000..c718d4e7a --- /dev/null +++ b/sdks/python/julep/api/types/tasks_set_step.py @@ -0,0 +1,49 @@ +# This file was auto-generated by Fern from our API Definition. + +import datetime as dt +import typing + +from ..core.datetime_utils import serialize_datetime +from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +from .tasks_base_workflow_step import TasksBaseWorkflowStep +from .tasks_set_step_set import TasksSetStepSet + + +class TasksSetStep(TasksBaseWorkflowStep): + set_: TasksSetStepSet = pydantic_v1.Field(alias="set") + """ + The value to set + """ + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} diff --git a/sdks/python/julep/api/types/tasks_set_step_set.py b/sdks/python/julep/api/types/tasks_set_step_set.py new file mode 100644 index 000000000..0b5c955c1 --- /dev/null +++ b/sdks/python/julep/api/types/tasks_set_step_set.py @@ -0,0 +1,7 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +from .tasks_set_key import TasksSetKey + +TasksSetStepSet = typing.Union[TasksSetKey, typing.List[TasksSetKey]] diff --git a/sdks/python/julep/api/types/tasks_sleep_for.py b/sdks/python/julep/api/types/tasks_sleep_for.py new file mode 100644 index 000000000..44a3acd32 --- /dev/null +++ b/sdks/python/julep/api/types/tasks_sleep_for.py @@ -0,0 +1,60 @@ +# This file was auto-generated by Fern from our API Definition. 
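TasksSetStepSet above accepts either a single TasksSetKey or a list of them, so one set step can write one variable or several. A sketch with placeholder keys and expressions, using the create-request variant; the `set_` field serializes under the "set" alias:

from julep.api.types.tasks_create_task_request_main_item import (
    TasksCreateTaskRequestMainItem_Set,
)
from julep.api.types.tasks_set_key import TasksSetKey

set_one = TasksCreateTaskRequestMainItem_Set(
    set_=TasksSetKey(key="user_name", value="inputs['name']")
)
set_many = TasksCreateTaskRequestMainItem_Set(
    set_=[
        TasksSetKey(key="started", value="True"),
        TasksSetKey(key="attempts", value="0"),
    ]
)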
+ +import datetime as dt +import typing + +from ..core.datetime_utils import serialize_datetime +from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 + + +class TasksSleepFor(pydantic_v1.BaseModel): + seconds: int = pydantic_v1.Field() + """ + The number of seconds to sleep for + """ + + minutes: int = pydantic_v1.Field() + """ + The number of minutes to sleep for + """ + + hours: int = pydantic_v1.Field() + """ + The number of hours to sleep for + """ + + days: int = pydantic_v1.Field() + """ + The number of days to sleep for + """ + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} diff --git a/sdks/python/julep/api/types/tasks_sleep_step.py b/sdks/python/julep/api/types/tasks_sleep_step.py new file mode 100644 index 000000000..5261bad5f --- /dev/null +++ b/sdks/python/julep/api/types/tasks_sleep_step.py @@ -0,0 +1,49 @@ +# This file was auto-generated by Fern from our API Definition. + +import datetime as dt +import typing + +from ..core.datetime_utils import serialize_datetime +from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +from .tasks_base_workflow_step import TasksBaseWorkflowStep +from .tasks_sleep_for import TasksSleepFor + + +class TasksSleepStep(TasksBaseWorkflowStep): + sleep: TasksSleepFor = pydantic_v1.Field() + """ + The duration to sleep for + """ + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} diff --git a/sdks/python/julep/api/types/tasks_switch_step.py b/sdks/python/julep/api/types/tasks_switch_step.py new file mode 100644 index 000000000..0a83a8185 --- /dev/null +++ b/sdks/python/julep/api/types/tasks_switch_step.py @@ -0,0 +1,49 @@ +# This file was auto-generated by Fern from our API Definition. 
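The switch step defined next carries a list of TasksCaseThen entries, each pairing a condition with a single `then` step; TasksCaseThenThen includes typing.Any, so a plain dict is accepted for the branch. A sketch with placeholder conditions, using the create-request variant:

from julep.api.types.tasks_case_then import TasksCaseThen
from julep.api.types.tasks_create_task_request_main_item import (
    TasksCreateTaskRequestMainItem_Switch,
)

switch_step = TasksCreateTaskRequestMainItem_Switch(
    switch=[
        TasksCaseThen(case="inputs['lang'] == 'en'", then={"kind_": "log", "log": "'english input'"}),
        TasksCaseThen(case="True", then={"kind_": "log", "log": "'fallback case'"}),
    ]
)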
+ +import datetime as dt +import typing + +from ..core.datetime_utils import serialize_datetime +from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +from .tasks_base_workflow_step import TasksBaseWorkflowStep +from .tasks_case_then import TasksCaseThen + + +class TasksSwitchStep(TasksBaseWorkflowStep): + switch: typing.List[TasksCaseThen] = pydantic_v1.Field() + """ + The cond tree + """ + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} diff --git a/sdks/python/julep/api/types/tasks_task_main_item.py b/sdks/python/julep/api/types/tasks_task_main_item.py index 41cda3d7d..60e7aed65 100644 --- a/sdks/python/julep/api/types/tasks_task_main_item.py +++ b/sdks/python/julep/api/types/tasks_task_main_item.py @@ -10,9 +10,17 @@ from .chat_chat_settings import ChatChatSettings from .common_py_expression import CommonPyExpression from .common_tool_ref import CommonToolRef +from .docs_embed_query_request import DocsEmbedQueryRequest +from .tasks_case_then import TasksCaseThen +from .tasks_foreach_do import TasksForeachDo from .tasks_if_else_workflow_step_else import TasksIfElseWorkflowStepElse from .tasks_if_else_workflow_step_then import TasksIfElseWorkflowStepThen +from .tasks_map_over import TasksMapOver +from .tasks_parallel_step_parallel_item import TasksParallelStepParallelItem from .tasks_prompt_step_prompt import TasksPromptStepPrompt +from .tasks_search_step_search import TasksSearchStepSearch +from .tasks_set_step_set import TasksSetStepSet +from .tasks_sleep_for import TasksSleepFor from .tasks_wait_for_input_step_info import TasksWaitForInputStepInfo @@ -213,6 +221,272 @@ class Config: json_encoders = {dt.datetime: serialize_datetime} +class TasksTaskMainItem_Sleep(pydantic_v1.BaseModel): + sleep: TasksSleepFor + kind: typing.Literal["sleep"] = pydantic_v1.Field(alias="kind_", default="sleep") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class 
TasksTaskMainItem_Return(pydantic_v1.BaseModel): + return_: typing.Dict[str, CommonPyExpression] = pydantic_v1.Field(alias="return") + kind: typing.Literal["return"] = pydantic_v1.Field(alias="kind_", default="return") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksTaskMainItem_Get(pydantic_v1.BaseModel): + get: str + kind: typing.Literal["get"] = pydantic_v1.Field(alias="kind_", default="get") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksTaskMainItem_Set(pydantic_v1.BaseModel): + set_: TasksSetStepSet = pydantic_v1.Field(alias="set") + kind: typing.Literal["set"] = pydantic_v1.Field(alias="kind_", default="set") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksTaskMainItem_Log(pydantic_v1.BaseModel): + log: CommonPyExpression + kind: typing.Literal["log"] = pydantic_v1.Field(alias="kind_", default="log") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + 
kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksTaskMainItem_Embed(pydantic_v1.BaseModel): + embed: DocsEmbedQueryRequest + kind: typing.Literal["embed"] = pydantic_v1.Field(alias="kind_", default="embed") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksTaskMainItem_Search(pydantic_v1.BaseModel): + search: TasksSearchStepSearch + kind: typing.Literal["search"] = pydantic_v1.Field(alias="kind_", default="search") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + class TasksTaskMainItem_WaitForInput(pydantic_v1.BaseModel): info: TasksWaitForInputStepInfo kind: typing.Literal["wait_for_input"] = pydantic_v1.Field( @@ -295,12 +569,182 @@ class Config: json_encoders = {dt.datetime: serialize_datetime} +class TasksTaskMainItem_Switch(pydantic_v1.BaseModel): + switch: typing.List[TasksCaseThen] + kind: typing.Literal["switch"] = pydantic_v1.Field(alias="kind_", default="switch") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + 
+ return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksTaskMainItem_Foreach(pydantic_v1.BaseModel): + foreach: TasksForeachDo + kind: typing.Literal["foreach"] = pydantic_v1.Field( + alias="kind_", default="foreach" + ) + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksTaskMainItem_Parallel(pydantic_v1.BaseModel): + parallel: typing.List[TasksParallelStepParallelItem] + kind: typing.Literal["parallel"] = pydantic_v1.Field( + alias="kind_", default="parallel" + ) + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksTaskMainItem_MapReduce(pydantic_v1.BaseModel): + map_: TasksMapOver = pydantic_v1.Field(alias="map") + reduce: CommonPyExpression + kind: typing.Literal["map_reduce"] = pydantic_v1.Field( + alias="kind_", default="map_reduce" + ) + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + 
TasksTaskMainItem = typing.Union[ TasksTaskMainItem_Evaluate, TasksTaskMainItem_ToolCall, TasksTaskMainItem_Yield, TasksTaskMainItem_Prompt, TasksTaskMainItem_Error, + TasksTaskMainItem_Sleep, + TasksTaskMainItem_Return, + TasksTaskMainItem_Get, + TasksTaskMainItem_Set, + TasksTaskMainItem_Log, + TasksTaskMainItem_Embed, + TasksTaskMainItem_Search, TasksTaskMainItem_WaitForInput, TasksTaskMainItem_IfElse, + TasksTaskMainItem_Switch, + TasksTaskMainItem_Foreach, + TasksTaskMainItem_Parallel, + TasksTaskMainItem_MapReduce, ] diff --git a/sdks/python/julep/api/types/tasks_update_task_request_main_item.py b/sdks/python/julep/api/types/tasks_update_task_request_main_item.py index 730545283..442c37707 100644 --- a/sdks/python/julep/api/types/tasks_update_task_request_main_item.py +++ b/sdks/python/julep/api/types/tasks_update_task_request_main_item.py @@ -10,9 +10,17 @@ from .chat_chat_settings import ChatChatSettings from .common_py_expression import CommonPyExpression from .common_tool_ref import CommonToolRef +from .docs_embed_query_request import DocsEmbedQueryRequest +from .tasks_case_then import TasksCaseThen +from .tasks_foreach_do import TasksForeachDo from .tasks_if_else_workflow_step_else import TasksIfElseWorkflowStepElse from .tasks_if_else_workflow_step_then import TasksIfElseWorkflowStepThen +from .tasks_map_over import TasksMapOver +from .tasks_parallel_step_parallel_item import TasksParallelStepParallelItem from .tasks_prompt_step_prompt import TasksPromptStepPrompt +from .tasks_search_step_search import TasksSearchStepSearch +from .tasks_set_step_set import TasksSetStepSet +from .tasks_sleep_for import TasksSleepFor from .tasks_wait_for_input_step_info import TasksWaitForInputStepInfo @@ -213,6 +221,272 @@ class Config: json_encoders = {dt.datetime: serialize_datetime} +class TasksUpdateTaskRequestMainItem_Sleep(pydantic_v1.BaseModel): + sleep: TasksSleepFor + kind: typing.Literal["sleep"] = pydantic_v1.Field(alias="kind_", default="sleep") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksUpdateTaskRequestMainItem_Return(pydantic_v1.BaseModel): + return_: typing.Dict[str, CommonPyExpression] = pydantic_v1.Field(alias="return") + kind: typing.Literal["return"] = pydantic_v1.Field(alias="kind_", default="return") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": 
True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksUpdateTaskRequestMainItem_Get(pydantic_v1.BaseModel): + get: str + kind: typing.Literal["get"] = pydantic_v1.Field(alias="kind_", default="get") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksUpdateTaskRequestMainItem_Set(pydantic_v1.BaseModel): + set_: TasksSetStepSet = pydantic_v1.Field(alias="set") + kind: typing.Literal["set"] = pydantic_v1.Field(alias="kind_", default="set") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksUpdateTaskRequestMainItem_Log(pydantic_v1.BaseModel): + log: CommonPyExpression + kind: typing.Literal["log"] = pydantic_v1.Field(alias="kind_", default="log") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksUpdateTaskRequestMainItem_Embed(pydantic_v1.BaseModel): + 
embed: DocsEmbedQueryRequest + kind: typing.Literal["embed"] = pydantic_v1.Field(alias="kind_", default="embed") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksUpdateTaskRequestMainItem_Search(pydantic_v1.BaseModel): + search: TasksSearchStepSearch + kind: typing.Literal["search"] = pydantic_v1.Field(alias="kind_", default="search") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + class TasksUpdateTaskRequestMainItem_WaitForInput(pydantic_v1.BaseModel): info: TasksWaitForInputStepInfo kind: typing.Literal["wait_for_input"] = pydantic_v1.Field( @@ -295,12 +569,182 @@ class Config: json_encoders = {dt.datetime: serialize_datetime} +class TasksUpdateTaskRequestMainItem_Switch(pydantic_v1.BaseModel): + switch: typing.List[TasksCaseThen] + kind: typing.Literal["switch"] = pydantic_v1.Field(alias="kind_", default="switch") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksUpdateTaskRequestMainItem_Foreach(pydantic_v1.BaseModel): + foreach: TasksForeachDo + kind: typing.Literal["foreach"] = pydantic_v1.Field( + alias="kind_", default="foreach" + ) + + def json(self, **kwargs: typing.Any) -> str: + 
kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksUpdateTaskRequestMainItem_Parallel(pydantic_v1.BaseModel): + parallel: typing.List[TasksParallelStepParallelItem] + kind: typing.Literal["parallel"] = pydantic_v1.Field( + alias="kind_", default="parallel" + ) + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksUpdateTaskRequestMainItem_MapReduce(pydantic_v1.BaseModel): + map_: TasksMapOver = pydantic_v1.Field(alias="map") + reduce: CommonPyExpression + kind: typing.Literal["map_reduce"] = pydantic_v1.Field( + alias="kind_", default="map_reduce" + ) + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + TasksUpdateTaskRequestMainItem = typing.Union[ TasksUpdateTaskRequestMainItem_Evaluate, TasksUpdateTaskRequestMainItem_ToolCall, TasksUpdateTaskRequestMainItem_Yield, TasksUpdateTaskRequestMainItem_Prompt, TasksUpdateTaskRequestMainItem_Error, + TasksUpdateTaskRequestMainItem_Sleep, + TasksUpdateTaskRequestMainItem_Return, + TasksUpdateTaskRequestMainItem_Get, + TasksUpdateTaskRequestMainItem_Set, + TasksUpdateTaskRequestMainItem_Log, + TasksUpdateTaskRequestMainItem_Embed, + TasksUpdateTaskRequestMainItem_Search, 
TasksUpdateTaskRequestMainItem_WaitForInput, TasksUpdateTaskRequestMainItem_IfElse, + TasksUpdateTaskRequestMainItem_Switch, + TasksUpdateTaskRequestMainItem_Foreach, + TasksUpdateTaskRequestMainItem_Parallel, + TasksUpdateTaskRequestMainItem_MapReduce, ] diff --git a/sdks/python/poetry.lock b/sdks/python/poetry.lock index 9b612ca30..f444f51a8 100644 --- a/sdks/python/poetry.lock +++ b/sdks/python/poetry.lock @@ -848,13 +848,13 @@ test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "p [[package]] name = "importlib-resources" -version = "6.4.2" +version = "6.4.3" description = "Read resources from Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "importlib_resources-6.4.2-py3-none-any.whl", hash = "sha256:8bba8c54a8a3afaa1419910845fa26ebd706dc716dd208d9b158b4b6966f5c5c"}, - {file = "importlib_resources-6.4.2.tar.gz", hash = "sha256:6cbfbefc449cc6e2095dd184691b7a12a04f40bc75dd4c55d31c34f174cdf57a"}, + {file = "importlib_resources-6.4.3-py3-none-any.whl", hash = "sha256:2d6dfe3b9e055f72495c2085890837fc8c758984e209115c8792bddcb762cd93"}, + {file = "importlib_resources-6.4.3.tar.gz", hash = "sha256:4a202b9b9d38563b46da59221d77bb73862ab5d79d461307bcb826d725448b98"}, ] [package.dependencies] @@ -1751,13 +1751,13 @@ files = [ [[package]] name = "openai" -version = "1.40.8" +version = "1.41.0" description = "The official Python library for the openai API" optional = false python-versions = ">=3.7.1" files = [ - {file = "openai-1.40.8-py3-none-any.whl", hash = "sha256:3ed4ddad48e0dde059c9b4d3dc240e47781beca2811e52ba449ddc4a471a2fd4"}, - {file = "openai-1.40.8.tar.gz", hash = "sha256:e225f830b946378e214c5b2cfa8df28ba2aeb7e9d44f738cb2a926fd971f5bc0"}, + {file = "openai-1.41.0-py3-none-any.whl", hash = "sha256:3b6cca4571667f3e0800442ef8f2bfa6a6f3301c51776bc7626159a4d81c242c"}, + {file = "openai-1.41.0.tar.gz", hash = "sha256:26b81f39b49dce92ff5d30c373625ddb212c2f1050e1574e456d18423730cdd0"}, ] [package.dependencies] diff --git a/sdks/ts/src/api/index.ts b/sdks/ts/src/api/index.ts index d92686f43..64ca78e83 100644 --- a/sdks/ts/src/api/index.ts +++ b/sdks/ts/src/api/index.ts @@ -96,13 +96,29 @@ export type { Sessions_SingleAgentNoUserSession } from "./models/Sessions_Single export type { Sessions_SingleAgentSingleUserSession } from "./models/Sessions_SingleAgentSingleUserSession"; export type { Sessions_UpdateSessionRequest } from "./models/Sessions_UpdateSessionRequest"; export type { Tasks_BaseWorkflowStep } from "./models/Tasks_BaseWorkflowStep"; +export type { Tasks_CaseThen } from "./models/Tasks_CaseThen"; export type { Tasks_CreateOrUpdateTaskRequest_id } from "./models/Tasks_CreateOrUpdateTaskRequest_id"; export type { Tasks_CreateTaskRequest } from "./models/Tasks_CreateTaskRequest"; +export type { Tasks_EmbedStep } from "./models/Tasks_EmbedStep"; export type { Tasks_ErrorWorkflowStep } from "./models/Tasks_ErrorWorkflowStep"; export type { Tasks_EvaluateStep } from "./models/Tasks_EvaluateStep"; +export type { Tasks_ForeachDo } from "./models/Tasks_ForeachDo"; +export type { Tasks_ForeachStep } from "./models/Tasks_ForeachStep"; +export type { Tasks_GetStep } from "./models/Tasks_GetStep"; export type { Tasks_IfElseWorkflowStep } from "./models/Tasks_IfElseWorkflowStep"; +export type { Tasks_LogStep } from "./models/Tasks_LogStep"; +export type { Tasks_MapOver } from "./models/Tasks_MapOver"; +export type { Tasks_MapReduceStep } from "./models/Tasks_MapReduceStep"; +export type { Tasks_ParallelStep } from "./models/Tasks_ParallelStep"; 
export type { Tasks_PatchTaskRequest } from "./models/Tasks_PatchTaskRequest"; export type { Tasks_PromptStep } from "./models/Tasks_PromptStep"; +export type { Tasks_ReturnStep } from "./models/Tasks_ReturnStep"; +export type { Tasks_SearchStep } from "./models/Tasks_SearchStep"; +export type { Tasks_SetKey } from "./models/Tasks_SetKey"; +export type { Tasks_SetStep } from "./models/Tasks_SetStep"; +export type { Tasks_SleepFor } from "./models/Tasks_SleepFor"; +export type { Tasks_SleepStep } from "./models/Tasks_SleepStep"; +export type { Tasks_SwitchStep } from "./models/Tasks_SwitchStep"; export type { Tasks_Task } from "./models/Tasks_Task"; export type { Tasks_TaskTool } from "./models/Tasks_TaskTool"; export type { Tasks_ToolCallStep } from "./models/Tasks_ToolCallStep"; @@ -215,13 +231,29 @@ export { $Sessions_SingleAgentNoUserSession } from "./schemas/$Sessions_SingleAg export { $Sessions_SingleAgentSingleUserSession } from "./schemas/$Sessions_SingleAgentSingleUserSession"; export { $Sessions_UpdateSessionRequest } from "./schemas/$Sessions_UpdateSessionRequest"; export { $Tasks_BaseWorkflowStep } from "./schemas/$Tasks_BaseWorkflowStep"; +export { $Tasks_CaseThen } from "./schemas/$Tasks_CaseThen"; export { $Tasks_CreateOrUpdateTaskRequest_id } from "./schemas/$Tasks_CreateOrUpdateTaskRequest_id"; export { $Tasks_CreateTaskRequest } from "./schemas/$Tasks_CreateTaskRequest"; +export { $Tasks_EmbedStep } from "./schemas/$Tasks_EmbedStep"; export { $Tasks_ErrorWorkflowStep } from "./schemas/$Tasks_ErrorWorkflowStep"; export { $Tasks_EvaluateStep } from "./schemas/$Tasks_EvaluateStep"; +export { $Tasks_ForeachDo } from "./schemas/$Tasks_ForeachDo"; +export { $Tasks_ForeachStep } from "./schemas/$Tasks_ForeachStep"; +export { $Tasks_GetStep } from "./schemas/$Tasks_GetStep"; export { $Tasks_IfElseWorkflowStep } from "./schemas/$Tasks_IfElseWorkflowStep"; +export { $Tasks_LogStep } from "./schemas/$Tasks_LogStep"; +export { $Tasks_MapOver } from "./schemas/$Tasks_MapOver"; +export { $Tasks_MapReduceStep } from "./schemas/$Tasks_MapReduceStep"; +export { $Tasks_ParallelStep } from "./schemas/$Tasks_ParallelStep"; export { $Tasks_PatchTaskRequest } from "./schemas/$Tasks_PatchTaskRequest"; export { $Tasks_PromptStep } from "./schemas/$Tasks_PromptStep"; +export { $Tasks_ReturnStep } from "./schemas/$Tasks_ReturnStep"; +export { $Tasks_SearchStep } from "./schemas/$Tasks_SearchStep"; +export { $Tasks_SetKey } from "./schemas/$Tasks_SetKey"; +export { $Tasks_SetStep } from "./schemas/$Tasks_SetStep"; +export { $Tasks_SleepFor } from "./schemas/$Tasks_SleepFor"; +export { $Tasks_SleepStep } from "./schemas/$Tasks_SleepStep"; +export { $Tasks_SwitchStep } from "./schemas/$Tasks_SwitchStep"; export { $Tasks_Task } from "./schemas/$Tasks_Task"; export { $Tasks_TaskTool } from "./schemas/$Tasks_TaskTool"; export { $Tasks_ToolCallStep } from "./schemas/$Tasks_ToolCallStep"; diff --git a/sdks/ts/src/api/models/Tasks_BaseWorkflowStep.ts b/sdks/ts/src/api/models/Tasks_BaseWorkflowStep.ts index c399bcaa8..10be91186 100644 --- a/sdks/ts/src/api/models/Tasks_BaseWorkflowStep.ts +++ b/sdks/ts/src/api/models/Tasks_BaseWorkflowStep.ts @@ -8,10 +8,21 @@ export type Tasks_BaseWorkflowStep = { */ kind_: | "tool_call" - | "yield" | "prompt" | "evaluate" - | "if_else" | "wait_for_input" + | "log" + | "embed" + | "search" + | "set" + | "get" + | "foreach" + | "map_reduce" + | "parallel" + | "switch" + | "if_else" + | "sleep" + | "return" + | "yield" | "error"; }; diff --git 
a/sdks/ts/src/api/models/Tasks_CaseThen.ts b/sdks/ts/src/api/models/Tasks_CaseThen.ts new file mode 100644 index 000000000..43bc0d588 --- /dev/null +++ b/sdks/ts/src/api/models/Tasks_CaseThen.ts @@ -0,0 +1,39 @@ +/* generated using openapi-typescript-codegen -- do no edit */ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +import type { Common_PyExpression } from "./Common_PyExpression"; +import type { Tasks_EmbedStep } from "./Tasks_EmbedStep"; +import type { Tasks_ErrorWorkflowStep } from "./Tasks_ErrorWorkflowStep"; +import type { Tasks_GetStep } from "./Tasks_GetStep"; +import type { Tasks_LogStep } from "./Tasks_LogStep"; +import type { Tasks_PromptStep } from "./Tasks_PromptStep"; +import type { Tasks_ReturnStep } from "./Tasks_ReturnStep"; +import type { Tasks_SearchStep } from "./Tasks_SearchStep"; +import type { Tasks_SetStep } from "./Tasks_SetStep"; +import type { Tasks_SleepStep } from "./Tasks_SleepStep"; +import type { Tasks_ToolCallStep } from "./Tasks_ToolCallStep"; +import type { Tasks_WaitForInputStep } from "./Tasks_WaitForInputStep"; +import type { Tasks_YieldStep } from "./Tasks_YieldStep"; +export type Tasks_CaseThen = { + /** + * The condition to evaluate + */ + case: Common_PyExpression; + /** + * The steps to run if the condition is true + */ + then: + | Tasks_ToolCallStep + | Tasks_YieldStep + | Tasks_PromptStep + | Tasks_ErrorWorkflowStep + | Tasks_SleepStep + | Tasks_ReturnStep + | Tasks_GetStep + | Tasks_SetStep + | Tasks_LogStep + | Tasks_EmbedStep + | Tasks_SearchStep + | Tasks_WaitForInputStep; +}; diff --git a/sdks/ts/src/api/models/Tasks_CreateTaskRequest.ts b/sdks/ts/src/api/models/Tasks_CreateTaskRequest.ts index 369cae664..274f27b8a 100644 --- a/sdks/ts/src/api/models/Tasks_CreateTaskRequest.ts +++ b/sdks/ts/src/api/models/Tasks_CreateTaskRequest.ts @@ -2,10 +2,21 @@ /* istanbul ignore file */ /* tslint:disable */ /* eslint-disable */ +import type { Tasks_EmbedStep } from "./Tasks_EmbedStep"; import type { Tasks_ErrorWorkflowStep } from "./Tasks_ErrorWorkflowStep"; import type { Tasks_EvaluateStep } from "./Tasks_EvaluateStep"; +import type { Tasks_ForeachStep } from "./Tasks_ForeachStep"; +import type { Tasks_GetStep } from "./Tasks_GetStep"; import type { Tasks_IfElseWorkflowStep } from "./Tasks_IfElseWorkflowStep"; +import type { Tasks_LogStep } from "./Tasks_LogStep"; +import type { Tasks_MapReduceStep } from "./Tasks_MapReduceStep"; +import type { Tasks_ParallelStep } from "./Tasks_ParallelStep"; import type { Tasks_PromptStep } from "./Tasks_PromptStep"; +import type { Tasks_ReturnStep } from "./Tasks_ReturnStep"; +import type { Tasks_SearchStep } from "./Tasks_SearchStep"; +import type { Tasks_SetStep } from "./Tasks_SetStep"; +import type { Tasks_SleepStep } from "./Tasks_SleepStep"; +import type { Tasks_SwitchStep } from "./Tasks_SwitchStep"; import type { Tasks_ToolCallStep } from "./Tasks_ToolCallStep"; import type { Tasks_WaitForInputStep } from "./Tasks_WaitForInputStep"; import type { Tasks_YieldStep } from "./Tasks_YieldStep"; @@ -20,7 +31,18 @@ export type Tasks_CreateTaskRequest = Record< | Tasks_YieldStep | Tasks_PromptStep | Tasks_ErrorWorkflowStep + | Tasks_SleepStep + | Tasks_ReturnStep + | Tasks_GetStep + | Tasks_SetStep + | Tasks_LogStep + | Tasks_EmbedStep + | Tasks_SearchStep | Tasks_WaitForInputStep | Tasks_IfElseWorkflowStep + | Tasks_SwitchStep + | Tasks_ForeachStep + | Tasks_ParallelStep + | Tasks_MapReduceStep > >; diff --git a/sdks/ts/src/api/models/Tasks_EmbedStep.ts 
b/sdks/ts/src/api/models/Tasks_EmbedStep.ts new file mode 100644 index 000000000..2036ceead --- /dev/null +++ b/sdks/ts/src/api/models/Tasks_EmbedStep.ts @@ -0,0 +1,13 @@ +/* generated using openapi-typescript-codegen -- do no edit */ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +import type { Docs_EmbedQueryRequest } from "./Docs_EmbedQueryRequest"; +import type { Tasks_BaseWorkflowStep } from "./Tasks_BaseWorkflowStep"; +export type Tasks_EmbedStep = Tasks_BaseWorkflowStep & { + kind_: "embed"; + /** + * The text to embed + */ + embed: Docs_EmbedQueryRequest; +}; diff --git a/sdks/ts/src/api/models/Tasks_ForeachDo.ts b/sdks/ts/src/api/models/Tasks_ForeachDo.ts new file mode 100644 index 000000000..f035536f5 --- /dev/null +++ b/sdks/ts/src/api/models/Tasks_ForeachDo.ts @@ -0,0 +1,40 @@ +/* generated using openapi-typescript-codegen -- do no edit */ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +import type { Common_PyExpression } from "./Common_PyExpression"; +import type { Tasks_EmbedStep } from "./Tasks_EmbedStep"; +import type { Tasks_ErrorWorkflowStep } from "./Tasks_ErrorWorkflowStep"; +import type { Tasks_GetStep } from "./Tasks_GetStep"; +import type { Tasks_LogStep } from "./Tasks_LogStep"; +import type { Tasks_PromptStep } from "./Tasks_PromptStep"; +import type { Tasks_ReturnStep } from "./Tasks_ReturnStep"; +import type { Tasks_SearchStep } from "./Tasks_SearchStep"; +import type { Tasks_SetStep } from "./Tasks_SetStep"; +import type { Tasks_SleepStep } from "./Tasks_SleepStep"; +import type { Tasks_ToolCallStep } from "./Tasks_ToolCallStep"; +import type { Tasks_WaitForInputStep } from "./Tasks_WaitForInputStep"; +import type { Tasks_YieldStep } from "./Tasks_YieldStep"; +export type Tasks_ForeachDo = { + /** + * The variable to iterate over + */ + in: Common_PyExpression; + /** + * The steps to run for each iteration + */ + do: Array< + | Tasks_ToolCallStep + | Tasks_YieldStep + | Tasks_PromptStep + | Tasks_ErrorWorkflowStep + | Tasks_SleepStep + | Tasks_ReturnStep + | Tasks_GetStep + | Tasks_SetStep + | Tasks_LogStep + | Tasks_EmbedStep + | Tasks_SearchStep + | Tasks_WaitForInputStep + >; +}; diff --git a/sdks/ts/src/api/models/Tasks_ForeachStep.ts b/sdks/ts/src/api/models/Tasks_ForeachStep.ts new file mode 100644 index 000000000..31989f782 --- /dev/null +++ b/sdks/ts/src/api/models/Tasks_ForeachStep.ts @@ -0,0 +1,13 @@ +/* generated using openapi-typescript-codegen -- do no edit */ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +import type { Tasks_BaseWorkflowStep } from "./Tasks_BaseWorkflowStep"; +import type { Tasks_ForeachDo } from "./Tasks_ForeachDo"; +export type Tasks_ForeachStep = Tasks_BaseWorkflowStep & { + kind_: "foreach"; + /** + * The steps to run for each iteration + */ + foreach: Tasks_ForeachDo; +}; diff --git a/sdks/ts/src/api/models/Tasks_GetStep.ts b/sdks/ts/src/api/models/Tasks_GetStep.ts new file mode 100644 index 000000000..a8d20ecbd --- /dev/null +++ b/sdks/ts/src/api/models/Tasks_GetStep.ts @@ -0,0 +1,12 @@ +/* generated using openapi-typescript-codegen -- do no edit */ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +import type { Tasks_BaseWorkflowStep } from "./Tasks_BaseWorkflowStep"; +export type Tasks_GetStep = Tasks_BaseWorkflowStep & { + kind_: "get"; + /** + * The key to get + */ + get: string; +}; diff --git a/sdks/ts/src/api/models/Tasks_IfElseWorkflowStep.ts b/sdks/ts/src/api/models/Tasks_IfElseWorkflowStep.ts index d07ce4d07..dc2212143 
100644 --- a/sdks/ts/src/api/models/Tasks_IfElseWorkflowStep.ts +++ b/sdks/ts/src/api/models/Tasks_IfElseWorkflowStep.ts @@ -4,8 +4,15 @@ /* eslint-disable */ import type { Common_PyExpression } from "./Common_PyExpression"; import type { Tasks_BaseWorkflowStep } from "./Tasks_BaseWorkflowStep"; +import type { Tasks_EmbedStep } from "./Tasks_EmbedStep"; import type { Tasks_ErrorWorkflowStep } from "./Tasks_ErrorWorkflowStep"; +import type { Tasks_GetStep } from "./Tasks_GetStep"; +import type { Tasks_LogStep } from "./Tasks_LogStep"; import type { Tasks_PromptStep } from "./Tasks_PromptStep"; +import type { Tasks_ReturnStep } from "./Tasks_ReturnStep"; +import type { Tasks_SearchStep } from "./Tasks_SearchStep"; +import type { Tasks_SetStep } from "./Tasks_SetStep"; +import type { Tasks_SleepStep } from "./Tasks_SleepStep"; import type { Tasks_ToolCallStep } from "./Tasks_ToolCallStep"; import type { Tasks_WaitForInputStep } from "./Tasks_WaitForInputStep"; import type { Tasks_YieldStep } from "./Tasks_YieldStep"; @@ -23,6 +30,13 @@ export type Tasks_IfElseWorkflowStep = Tasks_BaseWorkflowStep & { | Tasks_YieldStep | Tasks_PromptStep | Tasks_ErrorWorkflowStep + | Tasks_SleepStep + | Tasks_ReturnStep + | Tasks_GetStep + | Tasks_SetStep + | Tasks_LogStep + | Tasks_EmbedStep + | Tasks_SearchStep | Tasks_WaitForInputStep; /** * The steps to run if the condition is false @@ -32,5 +46,12 @@ export type Tasks_IfElseWorkflowStep = Tasks_BaseWorkflowStep & { | Tasks_YieldStep | Tasks_PromptStep | Tasks_ErrorWorkflowStep + | Tasks_SleepStep + | Tasks_ReturnStep + | Tasks_GetStep + | Tasks_SetStep + | Tasks_LogStep + | Tasks_EmbedStep + | Tasks_SearchStep | Tasks_WaitForInputStep; }; diff --git a/sdks/ts/src/api/models/Tasks_LogStep.ts b/sdks/ts/src/api/models/Tasks_LogStep.ts new file mode 100644 index 000000000..483628b36 --- /dev/null +++ b/sdks/ts/src/api/models/Tasks_LogStep.ts @@ -0,0 +1,13 @@ +/* generated using openapi-typescript-codegen -- do no edit */ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +import type { Common_PyExpression } from "./Common_PyExpression"; +import type { Tasks_BaseWorkflowStep } from "./Tasks_BaseWorkflowStep"; +export type Tasks_LogStep = Tasks_BaseWorkflowStep & { + kind_: "log"; + /** + * The value to log + */ + log: Common_PyExpression; +}; diff --git a/sdks/ts/src/api/models/Tasks_MapOver.ts b/sdks/ts/src/api/models/Tasks_MapOver.ts new file mode 100644 index 000000000..d293474c3 --- /dev/null +++ b/sdks/ts/src/api/models/Tasks_MapOver.ts @@ -0,0 +1,15 @@ +/* generated using openapi-typescript-codegen -- do no edit */ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +import type { Common_PyExpression } from "./Common_PyExpression"; +export type Tasks_MapOver = { + /** + * The variable to iterate over + */ + over: Common_PyExpression; + /** + * The subworkflow to run for each iteration + */ + workflow: string; +}; diff --git a/sdks/ts/src/api/models/Tasks_MapReduceStep.ts b/sdks/ts/src/api/models/Tasks_MapReduceStep.ts new file mode 100644 index 000000000..be542f460 --- /dev/null +++ b/sdks/ts/src/api/models/Tasks_MapReduceStep.ts @@ -0,0 +1,18 @@ +/* generated using openapi-typescript-codegen -- do no edit */ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +import type { Common_PyExpression } from "./Common_PyExpression"; +import type { Tasks_BaseWorkflowStep } from "./Tasks_BaseWorkflowStep"; +import type { Tasks_MapOver } from "./Tasks_MapOver"; +export type Tasks_MapReduceStep = 
Tasks_BaseWorkflowStep & { + kind_: "map_reduce"; + /** + * The steps to run for each iteration + */ + map: Tasks_MapOver; + /** + * The expression to reduce the results (`_` is a list of outputs) + */ + reduce: Common_PyExpression; +}; diff --git a/sdks/ts/src/api/models/Tasks_ParallelStep.ts b/sdks/ts/src/api/models/Tasks_ParallelStep.ts new file mode 100644 index 000000000..a094a3eca --- /dev/null +++ b/sdks/ts/src/api/models/Tasks_ParallelStep.ts @@ -0,0 +1,37 @@ +/* generated using openapi-typescript-codegen -- do no edit */ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +import type { Tasks_BaseWorkflowStep } from "./Tasks_BaseWorkflowStep"; +import type { Tasks_EmbedStep } from "./Tasks_EmbedStep"; +import type { Tasks_ErrorWorkflowStep } from "./Tasks_ErrorWorkflowStep"; +import type { Tasks_GetStep } from "./Tasks_GetStep"; +import type { Tasks_LogStep } from "./Tasks_LogStep"; +import type { Tasks_PromptStep } from "./Tasks_PromptStep"; +import type { Tasks_ReturnStep } from "./Tasks_ReturnStep"; +import type { Tasks_SearchStep } from "./Tasks_SearchStep"; +import type { Tasks_SetStep } from "./Tasks_SetStep"; +import type { Tasks_SleepStep } from "./Tasks_SleepStep"; +import type { Tasks_ToolCallStep } from "./Tasks_ToolCallStep"; +import type { Tasks_WaitForInputStep } from "./Tasks_WaitForInputStep"; +import type { Tasks_YieldStep } from "./Tasks_YieldStep"; +export type Tasks_ParallelStep = Tasks_BaseWorkflowStep & { + kind_: "parallel"; + /** + * The steps to run in parallel. Max concurrency will depend on the platform + */ + parallel: Array< + | Tasks_ToolCallStep + | Tasks_YieldStep + | Tasks_PromptStep + | Tasks_ErrorWorkflowStep + | Tasks_SleepStep + | Tasks_ReturnStep + | Tasks_GetStep + | Tasks_SetStep + | Tasks_LogStep + | Tasks_EmbedStep + | Tasks_SearchStep + | Tasks_WaitForInputStep + >; +}; diff --git a/sdks/ts/src/api/models/Tasks_PatchTaskRequest.ts b/sdks/ts/src/api/models/Tasks_PatchTaskRequest.ts index c5493e70c..0b5b917e1 100644 --- a/sdks/ts/src/api/models/Tasks_PatchTaskRequest.ts +++ b/sdks/ts/src/api/models/Tasks_PatchTaskRequest.ts @@ -2,10 +2,21 @@ /* istanbul ignore file */ /* tslint:disable */ /* eslint-disable */ +import type { Tasks_EmbedStep } from "./Tasks_EmbedStep"; import type { Tasks_ErrorWorkflowStep } from "./Tasks_ErrorWorkflowStep"; import type { Tasks_EvaluateStep } from "./Tasks_EvaluateStep"; +import type { Tasks_ForeachStep } from "./Tasks_ForeachStep"; +import type { Tasks_GetStep } from "./Tasks_GetStep"; import type { Tasks_IfElseWorkflowStep } from "./Tasks_IfElseWorkflowStep"; +import type { Tasks_LogStep } from "./Tasks_LogStep"; +import type { Tasks_MapReduceStep } from "./Tasks_MapReduceStep"; +import type { Tasks_ParallelStep } from "./Tasks_ParallelStep"; import type { Tasks_PromptStep } from "./Tasks_PromptStep"; +import type { Tasks_ReturnStep } from "./Tasks_ReturnStep"; +import type { Tasks_SearchStep } from "./Tasks_SearchStep"; +import type { Tasks_SetStep } from "./Tasks_SetStep"; +import type { Tasks_SleepStep } from "./Tasks_SleepStep"; +import type { Tasks_SwitchStep } from "./Tasks_SwitchStep"; import type { Tasks_ToolCallStep } from "./Tasks_ToolCallStep"; import type { Tasks_WaitForInputStep } from "./Tasks_WaitForInputStep"; import type { Tasks_YieldStep } from "./Tasks_YieldStep"; @@ -20,7 +31,18 @@ export type Tasks_PatchTaskRequest = Record< | Tasks_YieldStep | Tasks_PromptStep | Tasks_ErrorWorkflowStep + | Tasks_SleepStep + | Tasks_ReturnStep + | Tasks_GetStep + | Tasks_SetStep + | 
Tasks_LogStep
+ | Tasks_EmbedStep
+ | Tasks_SearchStep
| Tasks_WaitForInputStep
| Tasks_IfElseWorkflowStep
+ | Tasks_SwitchStep
+ | Tasks_ForeachStep
+ | Tasks_ParallelStep
+ | Tasks_MapReduceStep
> >;
diff --git a/sdks/ts/src/api/models/Tasks_ReturnStep.ts b/sdks/ts/src/api/models/Tasks_ReturnStep.ts
new file mode 100644
index 000000000..97488f129
--- /dev/null
+++ b/sdks/ts/src/api/models/Tasks_ReturnStep.ts
@@ -0,0 +1,13 @@
+/* generated using openapi-typescript-codegen -- do no edit */
+/* istanbul ignore file */
+/* tslint:disable */
+/* eslint-disable */
+import type { Common_PyExpression } from "./Common_PyExpression";
+import type { Tasks_BaseWorkflowStep } from "./Tasks_BaseWorkflowStep";
+export type Tasks_ReturnStep = Tasks_BaseWorkflowStep & {
+  kind_: "return";
+  /**
+   * The value to return
+   */
+  return: Record<string, Common_PyExpression>;
+};
diff --git a/sdks/ts/src/api/models/Tasks_SearchStep.ts b/sdks/ts/src/api/models/Tasks_SearchStep.ts
new file mode 100644
index 000000000..eabe9b707
--- /dev/null
+++ b/sdks/ts/src/api/models/Tasks_SearchStep.ts
@@ -0,0 +1,18 @@
+/* generated using openapi-typescript-codegen -- do no edit */
+/* istanbul ignore file */
+/* tslint:disable */
+/* eslint-disable */
+import type { Docs_HybridDocSearchRequest } from "./Docs_HybridDocSearchRequest";
+import type { Docs_TextOnlyDocSearchRequest } from "./Docs_TextOnlyDocSearchRequest";
+import type { Docs_VectorDocSearchRequest } from "./Docs_VectorDocSearchRequest";
+import type { Tasks_BaseWorkflowStep } from "./Tasks_BaseWorkflowStep";
+export type Tasks_SearchStep = Tasks_BaseWorkflowStep & {
+  kind_: "search";
+  /**
+   * The search query
+   */
+  search:
+    | Docs_VectorDocSearchRequest
+    | Docs_TextOnlyDocSearchRequest
+    | Docs_HybridDocSearchRequest;
+};
diff --git a/sdks/ts/src/api/models/Tasks_SetKey.ts b/sdks/ts/src/api/models/Tasks_SetKey.ts
new file mode 100644
index 000000000..a7cef005d
--- /dev/null
+++ b/sdks/ts/src/api/models/Tasks_SetKey.ts
@@ -0,0 +1,15 @@
+/* generated using openapi-typescript-codegen -- do no edit */
+/* istanbul ignore file */
+/* tslint:disable */
+/* eslint-disable */
+import type { Common_PyExpression } from "./Common_PyExpression";
+export type Tasks_SetKey = {
+  /**
+   * The key to set
+   */
+  key: string;
+  /**
+   * The value to set
+   */
+  value: Common_PyExpression;
+};
diff --git a/sdks/ts/src/api/models/Tasks_SetStep.ts b/sdks/ts/src/api/models/Tasks_SetStep.ts
new file mode 100644
index 000000000..a4425ecc3
--- /dev/null
+++ b/sdks/ts/src/api/models/Tasks_SetStep.ts
@@ -0,0 +1,13 @@
+/* generated using openapi-typescript-codegen -- do no edit */
+/* istanbul ignore file */
+/* tslint:disable */
+/* eslint-disable */
+import type { Tasks_BaseWorkflowStep } from "./Tasks_BaseWorkflowStep";
+import type { Tasks_SetKey } from "./Tasks_SetKey";
+export type Tasks_SetStep = Tasks_BaseWorkflowStep & {
+  kind_: "set";
+  /**
+   * The value to set
+   */
+  set: Tasks_SetKey | Array<Tasks_SetKey>;
+};
diff --git a/sdks/ts/src/api/models/Tasks_SleepFor.ts b/sdks/ts/src/api/models/Tasks_SleepFor.ts
new file mode 100644
index 000000000..85b1ef55f
--- /dev/null
+++ b/sdks/ts/src/api/models/Tasks_SleepFor.ts
@@ -0,0 +1,22 @@
+/* generated using openapi-typescript-codegen -- do no edit */
+/* istanbul ignore file */
+/* tslint:disable */
+/* eslint-disable */
+export type Tasks_SleepFor = {
+  /**
+   * The number of seconds to sleep for
+   */
+  seconds: number;
+  /**
+   * The number of minutes to sleep for
+   */
+  minutes: number;
+  /**
+   * The number of hours to sleep for
+   */
+  hours: number;
+  /**
+   * The number of days to sleep for
+   */
+  days: number;
+};
diff --git a/sdks/ts/src/api/models/Tasks_SleepStep.ts b/sdks/ts/src/api/models/Tasks_SleepStep.ts
new file mode 100644
index 000000000..c2a14b9b5
--- /dev/null
+++ b/sdks/ts/src/api/models/Tasks_SleepStep.ts
@@ -0,0 +1,13 @@
+/* generated using openapi-typescript-codegen -- do no edit */
+/* istanbul ignore file */
+/* tslint:disable */
+/* eslint-disable */
+import type { Tasks_BaseWorkflowStep } from "./Tasks_BaseWorkflowStep";
+import type { Tasks_SleepFor } from "./Tasks_SleepFor";
+export type Tasks_SleepStep = Tasks_BaseWorkflowStep & {
+  kind_: "sleep";
+  /**
+   * The duration to sleep for
+   */
+  sleep: Tasks_SleepFor;
+};
diff --git a/sdks/ts/src/api/models/Tasks_SwitchStep.ts b/sdks/ts/src/api/models/Tasks_SwitchStep.ts
new file mode 100644
index 000000000..27d68c6be
--- /dev/null
+++ b/sdks/ts/src/api/models/Tasks_SwitchStep.ts
@@ -0,0 +1,13 @@
+/* generated using openapi-typescript-codegen -- do no edit */
+/* istanbul ignore file */
+/* tslint:disable */
+/* eslint-disable */
+import type { Tasks_BaseWorkflowStep } from "./Tasks_BaseWorkflowStep";
+import type { Tasks_CaseThen } from "./Tasks_CaseThen";
+export type Tasks_SwitchStep = Tasks_BaseWorkflowStep & {
+  kind_: "switch";
+  /**
+   * The cond tree
+   */
+  switch: Array<Tasks_CaseThen>;
+};
diff --git a/sdks/ts/src/api/models/Tasks_Task.ts b/sdks/ts/src/api/models/Tasks_Task.ts
index 9b273fc4a..fe307a4e8 100644
--- a/sdks/ts/src/api/models/Tasks_Task.ts
+++ b/sdks/ts/src/api/models/Tasks_Task.ts
@@ -2,10 +2,21 @@
/* istanbul ignore file */
/* tslint:disable */
/* eslint-disable */
+import type { Tasks_EmbedStep } from "./Tasks_EmbedStep";
import type { Tasks_ErrorWorkflowStep } from "./Tasks_ErrorWorkflowStep";
import type { Tasks_EvaluateStep } from "./Tasks_EvaluateStep";
+import type { Tasks_ForeachStep } from "./Tasks_ForeachStep";
+import type { Tasks_GetStep } from "./Tasks_GetStep";
import type { Tasks_IfElseWorkflowStep } from "./Tasks_IfElseWorkflowStep";
+import type { Tasks_LogStep } from "./Tasks_LogStep";
+import type { Tasks_MapReduceStep } from "./Tasks_MapReduceStep";
+import type { Tasks_ParallelStep } from "./Tasks_ParallelStep";
import type { Tasks_PromptStep } from "./Tasks_PromptStep";
+import type { Tasks_ReturnStep } from "./Tasks_ReturnStep";
+import type { Tasks_SearchStep } from "./Tasks_SearchStep";
+import type { Tasks_SetStep } from "./Tasks_SetStep";
+import type { Tasks_SleepStep } from "./Tasks_SleepStep";
+import type { Tasks_SwitchStep } from "./Tasks_SwitchStep";
import type { Tasks_ToolCallStep } from "./Tasks_ToolCallStep";
import type { Tasks_WaitForInputStep } from "./Tasks_WaitForInputStep";
import type { Tasks_YieldStep } from "./Tasks_YieldStep";
@@ -20,7 +31,18 @@ export type Tasks_Task = Record<
| Tasks_YieldStep
| Tasks_PromptStep
| Tasks_ErrorWorkflowStep
+  | Tasks_SleepStep
+  | Tasks_ReturnStep
+  | Tasks_GetStep
+  | Tasks_SetStep
+  | Tasks_LogStep
+  | Tasks_EmbedStep
+  | Tasks_SearchStep
| Tasks_WaitForInputStep
| Tasks_IfElseWorkflowStep
+  | Tasks_SwitchStep
+  | Tasks_ForeachStep
+  | Tasks_ParallelStep
+  | Tasks_MapReduceStep
> >;
diff --git a/sdks/ts/src/api/models/Tasks_UpdateTaskRequest.ts b/sdks/ts/src/api/models/Tasks_UpdateTaskRequest.ts
index e823f29a4..a6346d173 100644
--- a/sdks/ts/src/api/models/Tasks_UpdateTaskRequest.ts
+++ b/sdks/ts/src/api/models/Tasks_UpdateTaskRequest.ts
@@ -2,10 +2,21 @@
/* istanbul ignore file */
/* tslint:disable */
/* eslint-disable */
+import type { Tasks_EmbedStep } from "./Tasks_EmbedStep";
import type {
Tasks_ErrorWorkflowStep } from "./Tasks_ErrorWorkflowStep"; import type { Tasks_EvaluateStep } from "./Tasks_EvaluateStep"; +import type { Tasks_ForeachStep } from "./Tasks_ForeachStep"; +import type { Tasks_GetStep } from "./Tasks_GetStep"; import type { Tasks_IfElseWorkflowStep } from "./Tasks_IfElseWorkflowStep"; +import type { Tasks_LogStep } from "./Tasks_LogStep"; +import type { Tasks_MapReduceStep } from "./Tasks_MapReduceStep"; +import type { Tasks_ParallelStep } from "./Tasks_ParallelStep"; import type { Tasks_PromptStep } from "./Tasks_PromptStep"; +import type { Tasks_ReturnStep } from "./Tasks_ReturnStep"; +import type { Tasks_SearchStep } from "./Tasks_SearchStep"; +import type { Tasks_SetStep } from "./Tasks_SetStep"; +import type { Tasks_SleepStep } from "./Tasks_SleepStep"; +import type { Tasks_SwitchStep } from "./Tasks_SwitchStep"; import type { Tasks_ToolCallStep } from "./Tasks_ToolCallStep"; import type { Tasks_WaitForInputStep } from "./Tasks_WaitForInputStep"; import type { Tasks_YieldStep } from "./Tasks_YieldStep"; @@ -20,7 +31,18 @@ export type Tasks_UpdateTaskRequest = Record< | Tasks_YieldStep | Tasks_PromptStep | Tasks_ErrorWorkflowStep + | Tasks_SleepStep + | Tasks_ReturnStep + | Tasks_GetStep + | Tasks_SetStep + | Tasks_LogStep + | Tasks_EmbedStep + | Tasks_SearchStep | Tasks_WaitForInputStep | Tasks_IfElseWorkflowStep + | Tasks_SwitchStep + | Tasks_ForeachStep + | Tasks_ParallelStep + | Tasks_MapReduceStep > >; diff --git a/sdks/ts/src/api/schemas/$Tasks_CaseThen.ts b/sdks/ts/src/api/schemas/$Tasks_CaseThen.ts new file mode 100644 index 000000000..0e8eceebc --- /dev/null +++ b/sdks/ts/src/api/schemas/$Tasks_CaseThen.ts @@ -0,0 +1,61 @@ +/* generated using openapi-typescript-codegen -- do no edit */ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $Tasks_CaseThen = { + properties: { + case: { + type: "all-of", + description: `The condition to evaluate`, + contains: [ + { + type: "Common_PyExpression", + }, + ], + isRequired: true, + }, + then: { + type: "any-of", + description: `The steps to run if the condition is true`, + contains: [ + { + type: "Tasks_ToolCallStep", + }, + { + type: "Tasks_YieldStep", + }, + { + type: "Tasks_PromptStep", + }, + { + type: "Tasks_ErrorWorkflowStep", + }, + { + type: "Tasks_SleepStep", + }, + { + type: "Tasks_ReturnStep", + }, + { + type: "Tasks_GetStep", + }, + { + type: "Tasks_SetStep", + }, + { + type: "Tasks_LogStep", + }, + { + type: "Tasks_EmbedStep", + }, + { + type: "Tasks_SearchStep", + }, + { + type: "Tasks_WaitForInputStep", + }, + ], + isRequired: true, + }, + }, +} as const; diff --git a/sdks/ts/src/api/schemas/$Tasks_CreateTaskRequest.ts b/sdks/ts/src/api/schemas/$Tasks_CreateTaskRequest.ts index 7c15a448d..1116b0b5f 100644 --- a/sdks/ts/src/api/schemas/$Tasks_CreateTaskRequest.ts +++ b/sdks/ts/src/api/schemas/$Tasks_CreateTaskRequest.ts @@ -24,12 +24,45 @@ export const $Tasks_CreateTaskRequest = { { type: "Tasks_ErrorWorkflowStep", }, + { + type: "Tasks_SleepStep", + }, + { + type: "Tasks_ReturnStep", + }, + { + type: "Tasks_GetStep", + }, + { + type: "Tasks_SetStep", + }, + { + type: "Tasks_LogStep", + }, + { + type: "Tasks_EmbedStep", + }, + { + type: "Tasks_SearchStep", + }, { type: "Tasks_WaitForInputStep", }, { type: "Tasks_IfElseWorkflowStep", }, + { + type: "Tasks_SwitchStep", + }, + { + type: "Tasks_ForeachStep", + }, + { + type: "Tasks_ParallelStep", + }, + { + type: "Tasks_MapReduceStep", + }, ], }, }, diff --git a/sdks/ts/src/api/schemas/$Tasks_EmbedStep.ts 
b/sdks/ts/src/api/schemas/$Tasks_EmbedStep.ts new file mode 100644 index 000000000..11cae473c --- /dev/null +++ b/sdks/ts/src/api/schemas/$Tasks_EmbedStep.ts @@ -0,0 +1,30 @@ +/* generated using openapi-typescript-codegen -- do no edit */ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $Tasks_EmbedStep = { + type: "all-of", + contains: [ + { + type: "Tasks_BaseWorkflowStep", + }, + { + properties: { + kind_: { + type: "Enum", + isRequired: true, + }, + embed: { + type: "all-of", + description: `The text to embed`, + contains: [ + { + type: "Docs_EmbedQueryRequest", + }, + ], + isRequired: true, + }, + }, + }, + ], +} as const; diff --git a/sdks/ts/src/api/schemas/$Tasks_ForeachDo.ts b/sdks/ts/src/api/schemas/$Tasks_ForeachDo.ts new file mode 100644 index 000000000..ef7102320 --- /dev/null +++ b/sdks/ts/src/api/schemas/$Tasks_ForeachDo.ts @@ -0,0 +1,63 @@ +/* generated using openapi-typescript-codegen -- do no edit */ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $Tasks_ForeachDo = { + properties: { + in: { + type: "all-of", + description: `The variable to iterate over`, + contains: [ + { + type: "Common_PyExpression", + }, + ], + isRequired: true, + }, + do: { + type: "array", + contains: { + type: "any-of", + contains: [ + { + type: "Tasks_ToolCallStep", + }, + { + type: "Tasks_YieldStep", + }, + { + type: "Tasks_PromptStep", + }, + { + type: "Tasks_ErrorWorkflowStep", + }, + { + type: "Tasks_SleepStep", + }, + { + type: "Tasks_ReturnStep", + }, + { + type: "Tasks_GetStep", + }, + { + type: "Tasks_SetStep", + }, + { + type: "Tasks_LogStep", + }, + { + type: "Tasks_EmbedStep", + }, + { + type: "Tasks_SearchStep", + }, + { + type: "Tasks_WaitForInputStep", + }, + ], + }, + isRequired: true, + }, + }, +} as const; diff --git a/sdks/ts/src/api/schemas/$Tasks_ForeachStep.ts b/sdks/ts/src/api/schemas/$Tasks_ForeachStep.ts new file mode 100644 index 000000000..3deb761b5 --- /dev/null +++ b/sdks/ts/src/api/schemas/$Tasks_ForeachStep.ts @@ -0,0 +1,30 @@ +/* generated using openapi-typescript-codegen -- do no edit */ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $Tasks_ForeachStep = { + type: "all-of", + contains: [ + { + type: "Tasks_BaseWorkflowStep", + }, + { + properties: { + kind_: { + type: "Enum", + isRequired: true, + }, + foreach: { + type: "all-of", + description: `The steps to run for each iteration`, + contains: [ + { + type: "Tasks_ForeachDo", + }, + ], + isRequired: true, + }, + }, + }, + ], +} as const; diff --git a/sdks/ts/src/api/schemas/$Tasks_GetStep.ts b/sdks/ts/src/api/schemas/$Tasks_GetStep.ts new file mode 100644 index 000000000..19da398f8 --- /dev/null +++ b/sdks/ts/src/api/schemas/$Tasks_GetStep.ts @@ -0,0 +1,25 @@ +/* generated using openapi-typescript-codegen -- do no edit */ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $Tasks_GetStep = { + type: "all-of", + contains: [ + { + type: "Tasks_BaseWorkflowStep", + }, + { + properties: { + kind_: { + type: "Enum", + isRequired: true, + }, + get: { + type: "string", + description: `The key to get`, + isRequired: true, + }, + }, + }, + ], +} as const; diff --git a/sdks/ts/src/api/schemas/$Tasks_IfElseWorkflowStep.ts b/sdks/ts/src/api/schemas/$Tasks_IfElseWorkflowStep.ts index ae1c3bbaf..a1def86dc 100644 --- a/sdks/ts/src/api/schemas/$Tasks_IfElseWorkflowStep.ts +++ b/sdks/ts/src/api/schemas/$Tasks_IfElseWorkflowStep.ts @@ -40,6 +40,27 @@ export const 
$Tasks_IfElseWorkflowStep = { { type: "Tasks_ErrorWorkflowStep", }, + { + type: "Tasks_SleepStep", + }, + { + type: "Tasks_ReturnStep", + }, + { + type: "Tasks_GetStep", + }, + { + type: "Tasks_SetStep", + }, + { + type: "Tasks_LogStep", + }, + { + type: "Tasks_EmbedStep", + }, + { + type: "Tasks_SearchStep", + }, { type: "Tasks_WaitForInputStep", }, @@ -62,6 +83,27 @@ export const $Tasks_IfElseWorkflowStep = { { type: "Tasks_ErrorWorkflowStep", }, + { + type: "Tasks_SleepStep", + }, + { + type: "Tasks_ReturnStep", + }, + { + type: "Tasks_GetStep", + }, + { + type: "Tasks_SetStep", + }, + { + type: "Tasks_LogStep", + }, + { + type: "Tasks_EmbedStep", + }, + { + type: "Tasks_SearchStep", + }, { type: "Tasks_WaitForInputStep", }, diff --git a/sdks/ts/src/api/schemas/$Tasks_LogStep.ts b/sdks/ts/src/api/schemas/$Tasks_LogStep.ts new file mode 100644 index 000000000..3565c937c --- /dev/null +++ b/sdks/ts/src/api/schemas/$Tasks_LogStep.ts @@ -0,0 +1,30 @@ +/* generated using openapi-typescript-codegen -- do no edit */ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $Tasks_LogStep = { + type: "all-of", + contains: [ + { + type: "Tasks_BaseWorkflowStep", + }, + { + properties: { + kind_: { + type: "Enum", + isRequired: true, + }, + log: { + type: "all-of", + description: `The value to log`, + contains: [ + { + type: "Common_PyExpression", + }, + ], + isRequired: true, + }, + }, + }, + ], +} as const; diff --git a/sdks/ts/src/api/schemas/$Tasks_MapOver.ts b/sdks/ts/src/api/schemas/$Tasks_MapOver.ts new file mode 100644 index 000000000..b12b438c1 --- /dev/null +++ b/sdks/ts/src/api/schemas/$Tasks_MapOver.ts @@ -0,0 +1,23 @@ +/* generated using openapi-typescript-codegen -- do no edit */ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $Tasks_MapOver = { + properties: { + over: { + type: "all-of", + description: `The variable to iterate over`, + contains: [ + { + type: "Common_PyExpression", + }, + ], + isRequired: true, + }, + workflow: { + type: "string", + description: `The subworkflow to run for each iteration`, + isRequired: true, + }, + }, +} as const; diff --git a/sdks/ts/src/api/schemas/$Tasks_MapReduceStep.ts b/sdks/ts/src/api/schemas/$Tasks_MapReduceStep.ts new file mode 100644 index 000000000..a88a4b6d7 --- /dev/null +++ b/sdks/ts/src/api/schemas/$Tasks_MapReduceStep.ts @@ -0,0 +1,40 @@ +/* generated using openapi-typescript-codegen -- do no edit */ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $Tasks_MapReduceStep = { + type: "all-of", + contains: [ + { + type: "Tasks_BaseWorkflowStep", + }, + { + properties: { + kind_: { + type: "Enum", + isRequired: true, + }, + map: { + type: "all-of", + description: `The steps to run for each iteration`, + contains: [ + { + type: "Tasks_MapOver", + }, + ], + isRequired: true, + }, + reduce: { + type: "all-of", + description: `The expression to reduce the results (\`_\` is a list of outputs)`, + contains: [ + { + type: "Common_PyExpression", + }, + ], + isRequired: true, + }, + }, + }, + ], +} as const; diff --git a/sdks/ts/src/api/schemas/$Tasks_ParallelStep.ts b/sdks/ts/src/api/schemas/$Tasks_ParallelStep.ts new file mode 100644 index 000000000..cf1f97820 --- /dev/null +++ b/sdks/ts/src/api/schemas/$Tasks_ParallelStep.ts @@ -0,0 +1,65 @@ +/* generated using openapi-typescript-codegen -- do no edit */ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $Tasks_ParallelStep = { + type: "all-of", + contains: 
[ + { + type: "Tasks_BaseWorkflowStep", + }, + { + properties: { + kind_: { + type: "Enum", + isRequired: true, + }, + parallel: { + type: "array", + contains: { + type: "any-of", + contains: [ + { + type: "Tasks_ToolCallStep", + }, + { + type: "Tasks_YieldStep", + }, + { + type: "Tasks_PromptStep", + }, + { + type: "Tasks_ErrorWorkflowStep", + }, + { + type: "Tasks_SleepStep", + }, + { + type: "Tasks_ReturnStep", + }, + { + type: "Tasks_GetStep", + }, + { + type: "Tasks_SetStep", + }, + { + type: "Tasks_LogStep", + }, + { + type: "Tasks_EmbedStep", + }, + { + type: "Tasks_SearchStep", + }, + { + type: "Tasks_WaitForInputStep", + }, + ], + }, + isRequired: true, + }, + }, + }, + ], +} as const; diff --git a/sdks/ts/src/api/schemas/$Tasks_PatchTaskRequest.ts b/sdks/ts/src/api/schemas/$Tasks_PatchTaskRequest.ts index 50c6201fe..561808f01 100644 --- a/sdks/ts/src/api/schemas/$Tasks_PatchTaskRequest.ts +++ b/sdks/ts/src/api/schemas/$Tasks_PatchTaskRequest.ts @@ -24,12 +24,45 @@ export const $Tasks_PatchTaskRequest = { { type: "Tasks_ErrorWorkflowStep", }, + { + type: "Tasks_SleepStep", + }, + { + type: "Tasks_ReturnStep", + }, + { + type: "Tasks_GetStep", + }, + { + type: "Tasks_SetStep", + }, + { + type: "Tasks_LogStep", + }, + { + type: "Tasks_EmbedStep", + }, + { + type: "Tasks_SearchStep", + }, { type: "Tasks_WaitForInputStep", }, { type: "Tasks_IfElseWorkflowStep", }, + { + type: "Tasks_SwitchStep", + }, + { + type: "Tasks_ForeachStep", + }, + { + type: "Tasks_ParallelStep", + }, + { + type: "Tasks_MapReduceStep", + }, ], }, }, diff --git a/sdks/ts/src/api/schemas/$Tasks_ReturnStep.ts b/sdks/ts/src/api/schemas/$Tasks_ReturnStep.ts new file mode 100644 index 000000000..04f7d4ab1 --- /dev/null +++ b/sdks/ts/src/api/schemas/$Tasks_ReturnStep.ts @@ -0,0 +1,27 @@ +/* generated using openapi-typescript-codegen -- do no edit */ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $Tasks_ReturnStep = { + type: "all-of", + contains: [ + { + type: "Tasks_BaseWorkflowStep", + }, + { + properties: { + kind_: { + type: "Enum", + isRequired: true, + }, + return: { + type: "dictionary", + contains: { + type: "Common_PyExpression", + }, + isRequired: true, + }, + }, + }, + ], +} as const; diff --git a/sdks/ts/src/api/schemas/$Tasks_SearchStep.ts b/sdks/ts/src/api/schemas/$Tasks_SearchStep.ts new file mode 100644 index 000000000..7c2ae3cb6 --- /dev/null +++ b/sdks/ts/src/api/schemas/$Tasks_SearchStep.ts @@ -0,0 +1,36 @@ +/* generated using openapi-typescript-codegen -- do no edit */ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $Tasks_SearchStep = { + type: "all-of", + contains: [ + { + type: "Tasks_BaseWorkflowStep", + }, + { + properties: { + kind_: { + type: "Enum", + isRequired: true, + }, + search: { + type: "any-of", + description: `The search query`, + contains: [ + { + type: "Docs_VectorDocSearchRequest", + }, + { + type: "Docs_TextOnlyDocSearchRequest", + }, + { + type: "Docs_HybridDocSearchRequest", + }, + ], + isRequired: true, + }, + }, + }, + ], +} as const; diff --git a/sdks/ts/src/api/schemas/$Tasks_SetKey.ts b/sdks/ts/src/api/schemas/$Tasks_SetKey.ts new file mode 100644 index 000000000..c93f367d1 --- /dev/null +++ b/sdks/ts/src/api/schemas/$Tasks_SetKey.ts @@ -0,0 +1,23 @@ +/* generated using openapi-typescript-codegen -- do no edit */ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $Tasks_SetKey = { + properties: { + key: { + type: "string", + description: `The key to set`, + 
isRequired: true, + }, + value: { + type: "all-of", + description: `The value to set`, + contains: [ + { + type: "Common_PyExpression", + }, + ], + isRequired: true, + }, + }, +} as const; diff --git a/sdks/ts/src/api/schemas/$Tasks_SetStep.ts b/sdks/ts/src/api/schemas/$Tasks_SetStep.ts new file mode 100644 index 000000000..7c768fc10 --- /dev/null +++ b/sdks/ts/src/api/schemas/$Tasks_SetStep.ts @@ -0,0 +1,36 @@ +/* generated using openapi-typescript-codegen -- do no edit */ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $Tasks_SetStep = { + type: "all-of", + contains: [ + { + type: "Tasks_BaseWorkflowStep", + }, + { + properties: { + kind_: { + type: "Enum", + isRequired: true, + }, + set: { + type: "any-of", + description: `The value to set`, + contains: [ + { + type: "Tasks_SetKey", + }, + { + type: "array", + contains: { + type: "Tasks_SetKey", + }, + }, + ], + isRequired: true, + }, + }, + }, + ], +} as const; diff --git a/sdks/ts/src/api/schemas/$Tasks_SleepFor.ts b/sdks/ts/src/api/schemas/$Tasks_SleepFor.ts new file mode 100644 index 000000000..a03d5591c --- /dev/null +++ b/sdks/ts/src/api/schemas/$Tasks_SleepFor.ts @@ -0,0 +1,32 @@ +/* generated using openapi-typescript-codegen -- do no edit */ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $Tasks_SleepFor = { + properties: { + seconds: { + type: "number", + description: `The number of seconds to sleep for`, + isRequired: true, + format: "uint16", + }, + minutes: { + type: "number", + description: `The number of minutes to sleep for`, + isRequired: true, + format: "uint16", + }, + hours: { + type: "number", + description: `The number of hours to sleep for`, + isRequired: true, + format: "uint16", + }, + days: { + type: "number", + description: `The number of days to sleep for`, + isRequired: true, + format: "uint16", + }, + }, +} as const; diff --git a/sdks/ts/src/api/schemas/$Tasks_SleepStep.ts b/sdks/ts/src/api/schemas/$Tasks_SleepStep.ts new file mode 100644 index 000000000..79d23cbcf --- /dev/null +++ b/sdks/ts/src/api/schemas/$Tasks_SleepStep.ts @@ -0,0 +1,30 @@ +/* generated using openapi-typescript-codegen -- do no edit */ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $Tasks_SleepStep = { + type: "all-of", + contains: [ + { + type: "Tasks_BaseWorkflowStep", + }, + { + properties: { + kind_: { + type: "Enum", + isRequired: true, + }, + sleep: { + type: "all-of", + description: `The duration to sleep for`, + contains: [ + { + type: "Tasks_SleepFor", + }, + ], + isRequired: true, + }, + }, + }, + ], +} as const; diff --git a/sdks/ts/src/api/schemas/$Tasks_SwitchStep.ts b/sdks/ts/src/api/schemas/$Tasks_SwitchStep.ts new file mode 100644 index 000000000..c8958af82 --- /dev/null +++ b/sdks/ts/src/api/schemas/$Tasks_SwitchStep.ts @@ -0,0 +1,27 @@ +/* generated using openapi-typescript-codegen -- do no edit */ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $Tasks_SwitchStep = { + type: "all-of", + contains: [ + { + type: "Tasks_BaseWorkflowStep", + }, + { + properties: { + kind_: { + type: "Enum", + isRequired: true, + }, + switch: { + type: "array", + contains: { + type: "Tasks_CaseThen", + }, + isRequired: true, + }, + }, + }, + ], +} as const; diff --git a/sdks/ts/src/api/schemas/$Tasks_Task.ts b/sdks/ts/src/api/schemas/$Tasks_Task.ts index d0431107d..4343739a9 100644 --- a/sdks/ts/src/api/schemas/$Tasks_Task.ts +++ b/sdks/ts/src/api/schemas/$Tasks_Task.ts @@ -24,12 +24,45 @@ 
export const $Tasks_Task = { { type: "Tasks_ErrorWorkflowStep", }, + { + type: "Tasks_SleepStep", + }, + { + type: "Tasks_ReturnStep", + }, + { + type: "Tasks_GetStep", + }, + { + type: "Tasks_SetStep", + }, + { + type: "Tasks_LogStep", + }, + { + type: "Tasks_EmbedStep", + }, + { + type: "Tasks_SearchStep", + }, { type: "Tasks_WaitForInputStep", }, { type: "Tasks_IfElseWorkflowStep", }, + { + type: "Tasks_SwitchStep", + }, + { + type: "Tasks_ForeachStep", + }, + { + type: "Tasks_ParallelStep", + }, + { + type: "Tasks_MapReduceStep", + }, ], }, }, diff --git a/sdks/ts/src/api/schemas/$Tasks_UpdateTaskRequest.ts b/sdks/ts/src/api/schemas/$Tasks_UpdateTaskRequest.ts index 5127d8f62..782e0bb56 100644 --- a/sdks/ts/src/api/schemas/$Tasks_UpdateTaskRequest.ts +++ b/sdks/ts/src/api/schemas/$Tasks_UpdateTaskRequest.ts @@ -24,12 +24,45 @@ export const $Tasks_UpdateTaskRequest = { { type: "Tasks_ErrorWorkflowStep", }, + { + type: "Tasks_SleepStep", + }, + { + type: "Tasks_ReturnStep", + }, + { + type: "Tasks_GetStep", + }, + { + type: "Tasks_SetStep", + }, + { + type: "Tasks_LogStep", + }, + { + type: "Tasks_EmbedStep", + }, + { + type: "Tasks_SearchStep", + }, { type: "Tasks_WaitForInputStep", }, { type: "Tasks_IfElseWorkflowStep", }, + { + type: "Tasks_SwitchStep", + }, + { + type: "Tasks_ForeachStep", + }, + { + type: "Tasks_ParallelStep", + }, + { + type: "Tasks_MapReduceStep", + }, ], }, }, diff --git a/typespec/tasks/models.tsp b/typespec/tasks/models.tsp index 7cde91759..749d47a11 100644 --- a/typespec/tasks/models.tsp +++ b/typespec/tasks/models.tsp @@ -1,17 +1,13 @@ import "@typespec/http"; -import "../agents"; import "../common"; -import "../chat"; -import "../entries"; import "../tools"; +import "./steps.tsp"; + using TypeSpec.Http; -using Agents; -using Chat; using Common; -using Entries; using Tools; namespace Tasks; @@ -20,105 +16,6 @@ namespace Tasks; // TASK MODELS // -alias WorkflowStepKind = - | /** A step that runs a tool */ - "tool_call" - | /** A step that runs a subworkflow */ - "yield" - | /** A step that runs a prompt */ - "prompt" - | /** A step that evaluates an expression */ - "evaluate" - | /** A step that runs a conditional */ - "if_else" - | /** A step that signals that it needs more input before resuming */ - "wait_for_input" - | /** Throw an error */ - "error"; - -model BaseWorkflowStep { - /** The kind of step */ - kind_: WorkflowStepKind; -} - -model ToolCallStep extends BaseWorkflowStep { - kind_: "tool_call" = "tool_call"; - - /** The tool to run */ - tool: toolRef; - - /** The input parameters for the tool */ - arguments: Record; -} - -/** An object where values are strings in the Common Expression Language that get evaluated before being passed downstream */ -alias ExpressionObject = Record; - -model YieldStep extends BaseWorkflowStep { - kind_: "yield" = "yield"; - - /** The subworkflow to run */ - workflow: string; - - /** The input parameters for the subworkflow */ - arguments: ExpressionObject; -} - -model PromptStep extends BaseWorkflowStep { - kind_: "prompt" = "prompt"; - - /** The prompt to run */ - prompt: string | InputChatMLMessage[]; - - /** Settings for the prompt */ - settings: ChatSettings; -} - -model EvaluateStep extends BaseWorkflowStep { - kind_: "evaluate" = "evaluate"; - - /** The expression to evaluate */ - evaluate: ExpressionObject; -} - -model ErrorWorkflowStep extends BaseWorkflowStep { - kind_: "error" = "error"; - - /** The error message */ - error: string; -} - -model WaitForInputStep extends BaseWorkflowStep { - kind_: 
"wait_for_input" = "wait_for_input"; - - /** Any additional info or data */ - info: string | Record; -} - -alias NonConditionalWorkflowStep = - | EvaluateStep - | ToolCallStep - | YieldStep - | PromptStep - | ErrorWorkflowStep - | WaitForInputStep; - -model IfElseWorkflowStep extends BaseWorkflowStep { - kind_: "if_else" = "if_else"; - - /** The condition to evaluate */ - `if`: PyExpression; - - /** The steps to run if the condition is true */ - then: NonConditionalWorkflowStep; - - /** The steps to run if the condition is false */ - `else`: NonConditionalWorkflowStep; -} - -alias WorkflowStep = NonConditionalWorkflowStep | IfElseWorkflowStep; -alias CreateWorkflowStep = WorkflowStep; - model Workflow { @key name: validPythonIdentifier; @@ -126,7 +23,7 @@ model Workflow { steps: WorkflowStep[]; } -model TaskTool extends CreateToolRequest{ +model TaskTool extends CreateToolRequest { /** Read-only: Whether the tool was inherited or not. Only applies within tasks. */ @visibility("read") inherited?: boolean = false; diff --git a/typespec/tasks/step_kind.tsp b/typespec/tasks/step_kind.tsp new file mode 100644 index 000000000..569c5737a --- /dev/null +++ b/typespec/tasks/step_kind.tsp @@ -0,0 +1,72 @@ +namespace Tasks; + +// +// STEP KINDS +// + +alias WorkflowStepKind = + //////////////////// + /// Common steps /// + //////////////////// + + | /** A step that runs a tool */ + "tool_call" + | /** A step that runs a prompt */ + "prompt" + | /** A step that evaluates an expression */ + "evaluate" + | /** A step that signals that it needs more input before resuming */ + "wait_for_input" + | /** log step */ + "log" + + //////////////////////// + /// Doc search steps /// + //////////////////////// + + | /** A step that can embed text */ + "embed" + | /** A step that can search for documents (in the agents doc store only) */ + "search" + + /////////////////////// + /// Key-value steps /// + /////////////////////// + + | /** set step */ + "set" + | /** get step */ + "get" + + /////////////////////// + /// Iteration steps /// + /////////////////////// + + | /** foreach step */ + "foreach" + | /** map_reduce step */ + "map_reduce" + | /** parallel step */ + "parallel" + + ///////////////////////// + /// Conditional steps /// + ///////////////////////// + + | /** switch step */ + "switch" + | /** A step that runs a conditional */ + "if_else" + + ////////////////////////// + /// Other control flow /// + ////////////////////////// + + | /** sleep step */ + "sleep" + | /** return step */ + "return" + | /** A step that runs a subworkflow */ + "yield" + | /** Throw an error */ + "error"; \ No newline at end of file diff --git a/typespec/tasks/steps.tsp b/typespec/tasks/steps.tsp new file mode 100644 index 000000000..b0db2a1e1 --- /dev/null +++ b/typespec/tasks/steps.tsp @@ -0,0 +1,269 @@ +import "@typespec/http"; + +import "../chat"; +import "../common"; +import "../docs"; +import "../entries"; + +import "./step_kind.tsp"; + +using TypeSpec.Http; + +using Chat; +using Common; +using Docs; +using Entries; + +namespace Tasks; + +// +// STEP DEFINITIONS +// + +/** An object where values are strings in the Common Expression Language that get evaluated before being passed downstream */ +alias ExpressionObject = Record; + +model BaseWorkflowStep { + /** The kind of step */ + kind_: WorkflowStepKind; +} + +alias NonConditionalWorkflowStep = + | EvaluateStep + | ToolCallStep + | YieldStep + | PromptStep + | ErrorWorkflowStep + | SleepStep + | ReturnStep + | GetStep + | SetStep + | LogStep + | EmbedStep + | SearchStep 
+ | WaitForInputStep; + +alias ConditionalStep = IfElseWorkflowStep | SwitchStep; +alias IterationStep = ForeachStep | ParallelStep | MapReduceStep; +alias WorkflowStep = NonConditionalWorkflowStep | ConditionalStep | IterationStep; + +alias CreateWorkflowStep = WorkflowStep; + +//////////////////// +/// Common steps /// +//////////////////// + +model ToolCallStep extends BaseWorkflowStep { + kind_: "tool_call" = "tool_call"; + + /** The tool to run */ + tool: toolRef; + + /** The input parameters for the tool */ + arguments: Record; +} + +model PromptStep extends BaseWorkflowStep { + kind_: "prompt" = "prompt"; + + /** The prompt to run */ + prompt: string | InputChatMLMessage[]; + + /** Settings for the prompt */ + settings: ChatSettings; +} + +model EvaluateStep extends BaseWorkflowStep { + kind_: "evaluate" = "evaluate"; + + /** The expression to evaluate */ + evaluate: ExpressionObject; +} + +model WaitForInputStep extends BaseWorkflowStep { + kind_: "wait_for_input" = "wait_for_input"; + + /** Any additional info or data */ + info: string | Record; +} + +model LogStep extends BaseWorkflowStep { + kind_: "log" = "log"; + + /** The value to log */ + log: PyExpression; +} + +//////////////////////// +/// Doc search steps /// +//////////////////////// + +model EmbedStep extends BaseWorkflowStep { + kind_: "embed" = "embed"; + + /** The text to embed */ + embed: EmbedQueryRequest; +} + +model SearchStep extends BaseWorkflowStep { + kind_: "search" = "search"; + + /** The search query */ + search: DocSearchRequest; +} + +/////////////////////// +/// Key-value steps /// +/////////////////////// + +model GetStep extends BaseWorkflowStep { + kind_: "get" = "get"; + + /** The key to get */ + get: string; +} + +model SetKey { + /** The key to set */ + key: string; + + /** The value to set */ + value: PyExpression; +} + +model SetStep extends BaseWorkflowStep { + kind_: "set" = "set"; + + /** The value to set */ + set: SetKey | SetKey[]; +} + +/////////////////////// +/// Iteration steps /// +/////////////////////// + + +model ParallelStep extends BaseWorkflowStep { + kind_: "parallel" = "parallel"; + + /** The steps to run in parallel. 
Max concurrency will depend on the platform */ + parallel: NonConditionalWorkflowStep[]; +} + +model ForeachDo { + /** The variable to iterate over */ + in: PyExpression; + + /** The steps to run for each iteration */ + do: NonConditionalWorkflowStep[]; +} + +model ForeachStep extends BaseWorkflowStep { + kind_: "foreach" = "foreach"; + + /** The steps to run for each iteration */ + foreach: ForeachDo; +} + +model MapOver { + /** The variable to iterate over */ + over: PyExpression; + + /** The subworkflow to run for each iteration */ + workflow: string; +} + +model MapReduceStep extends BaseWorkflowStep { + kind_: "map_reduce" = "map_reduce"; + + /** The steps to run for each iteration */ + map: MapOver; + + /** The expression to reduce the results (`_` is a list of outputs) */ + reduce: PyExpression; +} + +///////////////////////// +/// Conditional steps /// +///////////////////////// + +model IfElseWorkflowStep extends BaseWorkflowStep { + kind_: "if_else" = "if_else"; + + /** The condition to evaluate */ + `if`: PyExpression; + + /** The steps to run if the condition is true */ + then: NonConditionalWorkflowStep; + + /** The steps to run if the condition is false */ + `else`: NonConditionalWorkflowStep; +} + +model CaseThen { + /** The condition to evaluate */ + case: PyExpression; + + /** The steps to run if the condition is true */ + then: NonConditionalWorkflowStep; +} + +model SwitchStep extends BaseWorkflowStep { + kind_: "switch" = "switch"; + + /** The cond tree */ + switch: CaseThen[]; +} + +////////////////////////// +/// Other control flow /// +////////////////////////// + +model YieldStep extends BaseWorkflowStep { + kind_: "yield" = "yield"; + + /** The subworkflow to run */ + workflow: string; + + /** The input parameters for the subworkflow */ + arguments: ExpressionObject; +} + +model ErrorWorkflowStep extends BaseWorkflowStep { + kind_: "error" = "error"; + + /** The error message */ + error: string; +} + +model SleepFor { + /** The number of seconds to sleep for */ + @minValue(0) + seconds: uint16 = 0; + + /** The number of minutes to sleep for */ + @minValue(0) + minutes: uint16 = 0; + + /** The number of hours to sleep for */ + @minValue(0) + hours: uint16 = 0; + + /** The number of days to sleep for */ + @minValue(0) + days: uint16 = 0; +} + +model SleepStep extends BaseWorkflowStep { + kind_: "sleep" = "sleep"; + + /** The duration to sleep for */ + sleep: SleepFor; +} + +model ReturnStep extends BaseWorkflowStep { + kind_: "return" = "return"; + + /** The value to return */ + `return`: ExpressionObject; +}
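To make the new step kinds concrete, here is a minimal sketch of what the `main` list of a task payload exercising several of them could look like, written as plain TypeScript data that follows the step shapes defined in `steps.tsp` and the `$Tasks_*` schemas above. The expression strings and the `inputs`/`outputs` variable names are illustrative assumptions (only `_` is documented, by the `map_reduce` reduce field); task-level fields other than `main` are omitted.

```typescript
// Hedged sketch only -- not part of the generated SDK. Step shapes follow the
// schemas in this change (kind_ discriminator plus one step-specific field);
// the expression contents and context variable names are assumptions.
const exampleTaskMain = [
  // Key-value steps: store a value under a key, then read it back
  { kind_: "set", set: { key: "topic", value: "inputs['topic']" } }, // assumed expression syntax
  { kind_: "get", get: "topic" },

  // Log an expression
  { kind_: "log", log: "'starting report for ' + _" },

  // Run a list of non-conditional steps for each element of a list
  {
    kind_: "foreach",
    foreach: {
      in: "inputs['sections']", // assumed input variable
      do: [{ kind_: "log", log: "_" }],
    },
  },

  // Switch over case/then pairs (each `then` is a single non-conditional step)
  {
    kind_: "switch",
    switch: [
      { case: "len(inputs['sections']) > 0", then: { kind_: "log", log: "'ok'" } },
      { case: "True", then: { kind_: "error", error: "No sections provided" } },
    ],
  },

  // Pause the workflow; all four duration fields default to 0 in the spec
  { kind_: "sleep", sleep: { seconds: 0, minutes: 30, hours: 0, days: 0 } },

  // Return a mapping of output expressions
  { kind_: "return", return: { report: "outputs[-1]" } },
] as const;
```

A `Tasks_CreateTaskRequest` would carry such a list in its `main` field; the `kind_` literal is what distinguishes the step variants in the generated TypeScript and schema unions shown in this diff.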