diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 2b079e60b..06a361d1c 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -15,6 +15,10 @@ jobs: python: ["3.9", "3.13"] os: [ubuntu-latest, ubuntu-arm, macos-intel, macos-arm, windows-latest] include: + # On 3.9 there is a problem with import errors caused by pytest's loader that surface due + # to a bug in CPython, so we avoid using the assert rewriter. + - python: "3.9" + pytestExtraArgs: "--assert=plain" - os: ubuntu-latest python: "3.13" docsTarget: true @@ -54,13 +58,13 @@ jobs: - run: poe lint - run: poe build-develop - run: mkdir junit-xml - - run: poe test -s --junit-xml=junit-xml/${{ matrix.python }}--${{ matrix.os }}.xml + - run: poe test ${{matrix.pytestExtraArgs}} -s --junit-xml=junit-xml/${{ matrix.python }}--${{ matrix.os }}.xml # Time skipping doesn't yet support ARM - if: ${{ !endsWith(matrix.os, '-arm') }} - run: poe test -s --workflow-environment time-skipping --junit-xml=junit-xml/${{ matrix.python }}--${{ matrix.os }}--time-skipping.xml + run: poe test ${{matrix.pytestExtraArgs}} -s --workflow-environment time-skipping --junit-xml=junit-xml/${{ matrix.python }}--${{ matrix.os }}--time-skipping.xml # Check cloud if proper target and not on fork - if: ${{ matrix.cloudTestTarget && (github.event.pull_request.head.repo.full_name == '' || github.event.pull_request.head.repo.full_name == 'temporalio/sdk-python') }} - run: poe test -s -k test_cloud_client --junit-xml=junit-xml/${{ matrix.python }}--${{ matrix.os }}--cloud.xml + run: poe test ${{matrix.pytestExtraArgs}} -s -k test_cloud_client --junit-xml=junit-xml/${{ matrix.python }}--${{ matrix.os }}--cloud.xml env: TEMPORAL_CLIENT_CLOUD_API_KEY: ${{ secrets.TEMPORAL_CLIENT_CLOUD_API_KEY }} TEMPORAL_CLIENT_CLOUD_API_VERSION: 2024-05-13-00 diff --git a/README.md b/README.md index 58e775da1..4c58f6766 100644 --- a/README.md +++ b/README.md @@ -510,7 +510,7 @@ class GreetingWorkflow: 
start_to_close_timeout=timedelta(seconds=5), ) workflow.logger.debug("Greeting set to %s", self._current_greeting) - + # Wait for salutation update or complete signal (this can be # cancelled) await asyncio.wait( @@ -536,7 +536,7 @@ class GreetingWorkflow: @workflow.query def current_greeting(self) -> str: return self._current_greeting - + @workflow.update def set_and_get_greeting(self, greeting: str) -> str: old = self._current_greeting @@ -622,7 +622,7 @@ Here are the decorators that can be applied: * May mutate workflow state, and make calls to other workflow APIs like starting activities, etc. * Also accepts the `name` and `dynamic` parameters like signal, with the same semantics. * Update handlers may optionally define a validator method by decorating it with `@update_handler_method.validator`. - To reject an update before any events are written to history, throw an exception in a validator. Validators cannot + To reject an update before any events are written to history, throw an exception in a validator. Validators cannot be `async`, cannot mutate workflow state, and return nothing. * See [Signal and update handlers](#signal-and-update-handlers) below * `@workflow.query` - Defines a method as a query @@ -994,7 +994,7 @@ To run an entire workflow outside of a sandbox, set `sandboxed=False` on the `@w it. This will run the entire workflow outside of the workflow which means it can share global state and other bad things. -To disable the sandbox entirely for a worker, set the `Worker` init's `workflow_runner` keyword argument to +To disable the sandbox entirely for a worker, set the `Worker` init's `workflow_runner` keyword argument to `temporalio.worker.UnsandboxedWorkflowRunner()`. This value is defaulted to `temporalio.worker.workflow_sandbox.SandboxedWorkflowRunner()` so by changing it to the unsandboxed runner, the sandbox will not be used at all. 
diff --git a/temporalio/bridge/Cargo.lock b/temporalio/bridge/Cargo.lock index b5e428f03..06d1c1151 100644 --- a/temporalio/bridge/Cargo.lock +++ b/temporalio/bridge/Cargo.lock @@ -242,9 +242,9 @@ dependencies = [ [[package]] name = "cc" -version = "1.2.17" +version = "1.2.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1fcb57c740ae1daf453ae85f16e37396f672b039e00d9d866e07ddb24e328e3a" +checksum = "8e3a13707ac958681c13b39b458c073d0d9bc8a22cb1b2f4c8e55eb72c13f362" dependencies = [ "jobserver", "libc", @@ -340,9 +340,9 @@ dependencies = [ [[package]] name = "crossbeam-channel" -version = "0.5.14" +version = "0.5.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "06ba6d68e24814cb8de6bb986db8222d3a027d15872cabc0d18817bc3c0e4471" +checksum = "82b8f8f868b36967f9606790d1903570de9ceaf870a7bf9fbbd3016d636a2cb2" dependencies = [ "crossbeam-utils", ] @@ -374,9 +374,9 @@ dependencies = [ [[package]] name = "darling" -version = "0.20.10" +version = "0.20.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6f63b86c8a8826a49b8c21f08a2d07338eec8d900540f8630dc76284be802989" +checksum = "fc7f46116c46ff9ab3eb1597a45688b6715c6e628b5c133e288e709a29bcb4ee" dependencies = [ "darling_core", "darling_macro", @@ -384,9 +384,9 @@ dependencies = [ [[package]] name = "darling_core" -version = "0.20.10" +version = "0.20.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "95133861a8032aaea082871032f5815eb9e98cef03fa916ab4500513994df9e5" +checksum = "0d00b9596d185e565c2207a0b01f8bd1a135483d02d9b7b0a54b11da8d53412e" dependencies = [ "fnv", "ident_case", @@ -398,9 +398,9 @@ dependencies = [ [[package]] name = "darling_macro" -version = "0.20.10" +version = "0.20.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d336a2a514f6ccccaa3e09b02d41d35330c07ddf03a62165fcec10bb561c7806" +checksum = 
"fc34b93ccb385b40dc71c6fceac4b2ad23662c7eeb248cf10d529b7e055b6ead" dependencies = [ "darling_core", "quote", @@ -429,9 +429,9 @@ checksum = "da692b8d1080ea3045efaab14434d40468c3d8657e42abddfffca87b428f4c1b" [[package]] name = "deranged" -version = "0.4.1" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "28cfac68e08048ae1883171632c2aef3ebc555621ae56fbccce1cbf22dd7f058" +checksum = "9c9e6a11ca8224451684bc0d7d5a7adbf8f2fd6887261a1cfc3c0432f9d4068e" dependencies = [ "powerfmt", ] @@ -583,9 +583,9 @@ dependencies = [ [[package]] name = "errno" -version = "0.3.10" +version = "0.3.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "33d852cb9b869c2a9b3df2f71a3074817f01e1844f839a144f5fcef059a4eb5d" +checksum = "976dd42dc7e85965fe702eb8164f21f450704bdde31faefd6471dba214cb594e" dependencies = [ "libc", "windows-sys 0.59.0", @@ -617,9 +617,9 @@ checksum = "1d674e81391d1e1ab681a28d99df07927c6d4aa5b027d7da16ba32d1d21ecd99" [[package]] name = "flate2" -version = "1.1.0" +version = "1.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "11faaf5a5236997af9848be0bef4db95824b1d534ebc64d0f0c6cf3e67bd38dc" +checksum = "7ced92e76e966ca2fd84c8f7aa01a4aea65b0eb6648d72f7c8f3e2764a67fece" dependencies = [ "crc32fast", "miniz_oxide", @@ -842,7 +842,7 @@ dependencies = [ "futures-core", "futures-sink", "http", - "indexmap 2.8.0", + "indexmap 2.9.0", "slab", "tokio", "tokio-util", @@ -993,9 +993,9 @@ dependencies = [ [[package]] name = "hyper-util" -version = "0.1.10" +version = "0.1.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "df2dcfbe0677734ab2f3ffa7fa7bfd4706bfdc1ef393f2ee30184aed67e631b4" +checksum = "497bbc33a26fdd4af9ed9c70d63f61cf56a938375fbb32df34db9b1cd6d643f2" dependencies = [ "bytes", "futures-channel", @@ -1003,6 +1003,7 @@ dependencies = [ "http", "http-body", "hyper", + "libc", "pin-project-lite", "socket2", "tokio", @@ -1051,9 +1052,9 
@@ dependencies = [ [[package]] name = "icu_locid_transform_data" -version = "1.5.0" +version = "1.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fdc8ff3388f852bede6b579ad4e978ab004f139284d7b28715f773507b946f6e" +checksum = "7515e6d781098bf9f7205ab3fc7e9709d34554ae0b21ddbcb5febfa4bc7df11d" [[package]] name = "icu_normalizer" @@ -1075,9 +1076,9 @@ dependencies = [ [[package]] name = "icu_normalizer_data" -version = "1.5.0" +version = "1.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8cafbf7aa791e9b22bec55a167906f9e1215fd475cd22adfcf660e03e989516" +checksum = "c5e8338228bdc8ab83303f16b797e177953730f601a96c25d10cb3ab0daa0cb7" [[package]] name = "icu_properties" @@ -1096,9 +1097,9 @@ dependencies = [ [[package]] name = "icu_properties_data" -version = "1.5.0" +version = "1.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "67a8effbc3dd3e4ba1afa8ad918d5684b8868b3b26500753effea8d2eed19569" +checksum = "85fb8799753b75aee8d2a21d7c14d9f38921b54b3dbda10f5a3c7a7b82dba5e2" [[package]] name = "icu_provider" @@ -1167,9 +1168,9 @@ dependencies = [ [[package]] name = "indexmap" -version = "2.8.0" +version = "2.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3954d50fe15b02142bf25d3b8bdadb634ec3948f103d04ffe3031bc8fe9d7058" +checksum = "cea70ddb795996207ad57735b50c5982d8844f38ba9ee5f1aedcfb708a2aa11e" dependencies = [ "equivalent", "hashbrown 0.15.2", @@ -1231,10 +1232,11 @@ checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c" [[package]] name = "jobserver" -version = "0.1.32" +version = "0.1.33" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "48d1dbcbbeb6a7fec7e059840aa538bd62aaccf972c7346c4d9d2059312853d0" +checksum = "38f262f097c174adebe41eb73d66ae9c06b2844fb0da69969647bbddd9b0538a" dependencies = [ + "getrandom 0.3.2", "libc", ] @@ -1273,9 +1275,9 @@ dependencies = [ [[package]] name = 
"linux-raw-sys" -version = "0.9.3" +version = "0.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fe7db12097d22ec582439daf8618b8fdd1a7bef6270e9af3b1ebcd30893cf413" +checksum = "cd945864f07fe9f5371a27ad7b52a172b4b499999f1d97574c9fa68373937e12" [[package]] name = "litemap" @@ -1373,9 +1375,9 @@ checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" [[package]] name = "miniz_oxide" -version = "0.8.5" +version = "0.8.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e3e04debbb59698c15bacbb6d93584a8c0ca9cc3213cb423d31f760d8843ce5" +checksum = "3be647b768db090acb35d5ec5db2b0e1f1de11133ca123b9eacf5137868f892a" dependencies = [ "adler2", ] @@ -1480,9 +1482,9 @@ dependencies = [ [[package]] name = "once_cell" -version = "1.21.1" +version = "1.21.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d75b0bedcc4fe52caa0e03d9f1151a323e4aa5e2d78ba3580400cd3c9e2bc4bc" +checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d" [[package]] name = "openssl-probe" @@ -1634,7 +1636,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3672b37090dbd86368a4145bc067582552b29c27377cad4e0a306c97f9bd7772" dependencies = [ "fixedbitset", - "indexmap 2.8.0", + "indexmap 2.9.0", ] [[package]] @@ -1742,9 +1744,9 @@ dependencies = [ [[package]] name = "prettyplease" -version = "0.2.31" +version = "0.2.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5316f57387668042f561aae71480de936257848f9c43ce528e311d89a07cadeb" +checksum = "664ec5419c51e34154eec046ebcba56312d5a2fc3b09a06da188e1ad21afadf6" dependencies = [ "proc-macro2", "syn", @@ -2120,9 +2122,9 @@ dependencies = [ [[package]] name = "redox_syscall" -version = "0.5.10" +version = "0.5.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b8c0c260b63a8219631167be35e6a988e9554dbd323f8bd08439c8ed1302bd1" +checksum = 
"d2f103c6d277498fbceb16e84d317e2a400f160f46904d5f5410848c829511a3" dependencies = [ "bitflags", ] @@ -2279,9 +2281,9 @@ version = "0.1.0" [[package]] name = "rustix" -version = "1.0.3" +version = "1.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e56a18552996ac8d29ecc3b190b4fdbb2d91ca4ec396de7bbffaf43f3d637e96" +checksum = "d97817398dd4bb2e6da002002db259209759911da105da92bec29ccb12cf58bf" dependencies = [ "bitflags", "errno", @@ -2292,9 +2294,9 @@ dependencies = [ [[package]] name = "rustls" -version = "0.23.25" +version = "0.23.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "822ee9188ac4ec04a2f0531e55d035fb2de73f18b41a63c70c2712503b6fb13c" +checksum = "df51b5869f3a441595eac5e8ff14d486ff285f7b8c0df8770e49c3b56351f0f0" dependencies = [ "log", "once_cell", @@ -2507,15 +2509,15 @@ dependencies = [ [[package]] name = "smallvec" -version = "1.14.0" +version = "1.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7fcf8323ef1faaee30a44a340193b1ac6814fd9b7b4e88e9d4519a3e4abe1cfd" +checksum = "8917285742e9f3e1683f0a9c4e6b57960b7314d0b08d30d1ecd426713ee2eee9" [[package]] name = "socket2" -version = "0.5.8" +version = "0.5.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c970269d99b64e60ec3bd6ad27270092a5394c4e309314b18ae3fe575695fbe8" +checksum = "4f5fd57c80058a56cf5c777ab8a126398ece8e442983605d280a44ce79d0edef" dependencies = [ "libc", "windows-sys 0.52.0", @@ -2873,9 +2875,9 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tokio" -version = "1.44.1" +version = "1.44.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f382da615b842244d4b8738c82ed1275e6c5dd90c459a30941cd07080b06c91a" +checksum = "e6b88822cbe49de4185e3a4cbf8321dd487cf5fe0c5c65695fef6346371e9c48" dependencies = [ "backtrace", "bytes", @@ -3720,9 +3722,9 @@ dependencies = [ [[package]] name = "zip" 
-version = "2.5.0" +version = "2.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "27c03817464f64e23f6f37574b4fdc8cf65925b5bfd2b0f2aedf959791941f88" +checksum = "1dcb24d0152526ae49b9b96c1dcf71850ca1e0b882e4e28ed898a93c41334744" dependencies = [ "aes", "arbitrary", @@ -3734,7 +3736,7 @@ dependencies = [ "flate2", "getrandom 0.3.2", "hmac", - "indexmap 2.8.0", + "indexmap 2.9.0", "lzma-rs", "memchr", "pbkdf2", diff --git a/temporalio/bridge/proto/common/__init__.py b/temporalio/bridge/proto/common/__init__.py index 0e8f543d6..5622fffb8 100644 --- a/temporalio/bridge/proto/common/__init__.py +++ b/temporalio/bridge/proto/common/__init__.py @@ -1,6 +1,11 @@ -from .common_pb2 import NamespacedWorkflowExecution, VersioningIntent +from .common_pb2 import ( + NamespacedWorkflowExecution, + VersioningIntent, + WorkerDeploymentVersion, +) __all__ = [ "NamespacedWorkflowExecution", "VersioningIntent", + "WorkerDeploymentVersion", ] diff --git a/temporalio/bridge/proto/common/common_pb2.py b/temporalio/bridge/proto/common/common_pb2.py index 633dfd1c8..c56456fce 100644 --- a/temporalio/bridge/proto/common/common_pb2.py +++ b/temporalio/bridge/proto/common/common_pb2.py @@ -18,7 +18,7 @@ from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( - b'\n%temporal/sdk/core/common/common.proto\x12\x0e\x63oresdk.common\x1a\x1egoogle/protobuf/duration.proto"U\n\x1bNamespacedWorkflowExecution\x12\x11\n\tnamespace\x18\x01 \x01(\t\x12\x13\n\x0bworkflow_id\x18\x02 \x01(\t\x12\x0e\n\x06run_id\x18\x03 \x01(\t*@\n\x10VersioningIntent\x12\x0f\n\x0bUNSPECIFIED\x10\x00\x12\x0e\n\nCOMPATIBLE\x10\x01\x12\x0b\n\x07\x44\x45\x46\x41ULT\x10\x02\x42,\xea\x02)Temporalio::Internal::Bridge::Api::Commonb\x06proto3' + b'\n%temporal/sdk/core/common/common.proto\x12\x0e\x63oresdk.common\x1a\x1egoogle/protobuf/duration.proto"U\n\x1bNamespacedWorkflowExecution\x12\x11\n\tnamespace\x18\x01 
\x01(\t\x12\x13\n\x0bworkflow_id\x18\x02 \x01(\t\x12\x0e\n\x06run_id\x18\x03 \x01(\t"D\n\x17WorkerDeploymentVersion\x12\x17\n\x0f\x64\x65ployment_name\x18\x01 \x01(\t\x12\x10\n\x08\x62uild_id\x18\x02 \x01(\t*@\n\x10VersioningIntent\x12\x0f\n\x0bUNSPECIFIED\x10\x00\x12\x0e\n\nCOMPATIBLE\x10\x01\x12\x0b\n\x07\x44\x45\x46\x41ULT\x10\x02\x42,\xea\x02)Temporalio::Internal::Bridge::Api::Commonb\x06proto3' ) _VERSIONINGINTENT = DESCRIPTOR.enum_types_by_name["VersioningIntent"] @@ -31,6 +31,7 @@ _NAMESPACEDWORKFLOWEXECUTION = DESCRIPTOR.message_types_by_name[ "NamespacedWorkflowExecution" ] +_WORKERDEPLOYMENTVERSION = DESCRIPTOR.message_types_by_name["WorkerDeploymentVersion"] NamespacedWorkflowExecution = _reflection.GeneratedProtocolMessageType( "NamespacedWorkflowExecution", (_message.Message,), @@ -42,13 +43,26 @@ ) _sym_db.RegisterMessage(NamespacedWorkflowExecution) +WorkerDeploymentVersion = _reflection.GeneratedProtocolMessageType( + "WorkerDeploymentVersion", + (_message.Message,), + { + "DESCRIPTOR": _WORKERDEPLOYMENTVERSION, + "__module__": "temporal.sdk.core.common.common_pb2", + # @@protoc_insertion_point(class_scope:coresdk.common.WorkerDeploymentVersion) + }, +) +_sym_db.RegisterMessage(WorkerDeploymentVersion) + if _descriptor._USE_C_DESCRIPTORS == False: DESCRIPTOR._options = None DESCRIPTOR._serialized_options = ( b"\352\002)Temporalio::Internal::Bridge::Api::Common" ) - _VERSIONINGINTENT._serialized_start = 176 - _VERSIONINGINTENT._serialized_end = 240 + _VERSIONINGINTENT._serialized_start = 246 + _VERSIONINGINTENT._serialized_end = 310 _NAMESPACEDWORKFLOWEXECUTION._serialized_start = 89 _NAMESPACEDWORKFLOWEXECUTION._serialized_end = 174 + _WORKERDEPLOYMENTVERSION._serialized_start = 176 + _WORKERDEPLOYMENTVERSION._serialized_end = 244 # @@protoc_insertion_point(module_scope) diff --git a/temporalio/bridge/proto/common/common_pb2.pyi b/temporalio/bridge/proto/common/common_pb2.pyi index 279ed2975..739a129e1 100644 --- 
a/temporalio/bridge/proto/common/common_pb2.pyi +++ b/temporalio/bridge/proto/common/common_pb2.pyi @@ -99,3 +99,25 @@ class NamespacedWorkflowExecution(google.protobuf.message.Message): ) -> None: ... global___NamespacedWorkflowExecution = NamespacedWorkflowExecution + +class WorkerDeploymentVersion(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + DEPLOYMENT_NAME_FIELD_NUMBER: builtins.int + BUILD_ID_FIELD_NUMBER: builtins.int + deployment_name: builtins.str + build_id: builtins.str + def __init__( + self, + *, + deployment_name: builtins.str = ..., + build_id: builtins.str = ..., + ) -> None: ... + def ClearField( + self, + field_name: typing_extensions.Literal[ + "build_id", b"build_id", "deployment_name", b"deployment_name" + ], + ) -> None: ... + +global___WorkerDeploymentVersion = WorkerDeploymentVersion diff --git a/temporalio/bridge/proto/workflow_activation/workflow_activation_pb2.py b/temporalio/bridge/proto/workflow_activation/workflow_activation_pb2.py index 820c9b3bd..7d2b4786d 100644 --- a/temporalio/bridge/proto/workflow_activation/workflow_activation_pb2.py +++ b/temporalio/bridge/proto/workflow_activation/workflow_activation_pb2.py @@ -44,7 +44,7 @@ ) DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( - b'\n?temporal/sdk/core/workflow_activation/workflow_activation.proto\x12\x1b\x63oresdk.workflow_activation\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a%temporal/api/failure/v1/message.proto\x1a$temporal/api/update/v1/message.proto\x1a$temporal/api/common/v1/message.proto\x1a$temporal/api/enums/v1/workflow.proto\x1a\x37temporal/sdk/core/activity_result/activity_result.proto\x1a\x35temporal/sdk/core/child_workflow/child_workflow.proto\x1a%temporal/sdk/core/common/common.proto\x1a#temporal/sdk/core/nexus/nexus.proto"\xc7\x02\n\x12WorkflowActivation\x12\x0e\n\x06run_id\x18\x01 \x01(\t\x12-\n\ttimestamp\x18\x02 
\x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x14\n\x0cis_replaying\x18\x03 \x01(\x08\x12\x16\n\x0ehistory_length\x18\x04 \x01(\r\x12@\n\x04jobs\x18\x05 \x03(\x0b\x32\x32.coresdk.workflow_activation.WorkflowActivationJob\x12 \n\x18\x61vailable_internal_flags\x18\x06 \x03(\r\x12\x1a\n\x12history_size_bytes\x18\x07 \x01(\x04\x12!\n\x19\x63ontinue_as_new_suggested\x18\x08 \x01(\x08\x12!\n\x19\x62uild_id_for_current_task\x18\t \x01(\t"\xe0\n\n\x15WorkflowActivationJob\x12N\n\x13initialize_workflow\x18\x01 \x01(\x0b\x32/.coresdk.workflow_activation.InitializeWorkflowH\x00\x12<\n\nfire_timer\x18\x02 \x01(\x0b\x32&.coresdk.workflow_activation.FireTimerH\x00\x12K\n\x12update_random_seed\x18\x04 \x01(\x0b\x32-.coresdk.workflow_activation.UpdateRandomSeedH\x00\x12\x44\n\x0equery_workflow\x18\x05 \x01(\x0b\x32*.coresdk.workflow_activation.QueryWorkflowH\x00\x12\x46\n\x0f\x63\x61ncel_workflow\x18\x06 \x01(\x0b\x32+.coresdk.workflow_activation.CancelWorkflowH\x00\x12\x46\n\x0fsignal_workflow\x18\x07 \x01(\x0b\x32+.coresdk.workflow_activation.SignalWorkflowH\x00\x12H\n\x10resolve_activity\x18\x08 \x01(\x0b\x32,.coresdk.workflow_activation.ResolveActivityH\x00\x12G\n\x10notify_has_patch\x18\t \x01(\x0b\x32+.coresdk.workflow_activation.NotifyHasPatchH\x00\x12q\n&resolve_child_workflow_execution_start\x18\n \x01(\x0b\x32?.coresdk.workflow_activation.ResolveChildWorkflowExecutionStartH\x00\x12\x66\n resolve_child_workflow_execution\x18\x0b \x01(\x0b\x32:.coresdk.workflow_activation.ResolveChildWorkflowExecutionH\x00\x12\x66\n resolve_signal_external_workflow\x18\x0c \x01(\x0b\x32:.coresdk.workflow_activation.ResolveSignalExternalWorkflowH\x00\x12u\n(resolve_request_cancel_external_workflow\x18\r \x01(\x0b\x32\x41.coresdk.workflow_activation.ResolveRequestCancelExternalWorkflowH\x00\x12:\n\tdo_update\x18\x0e \x01(\x0b\x32%.coresdk.workflow_activation.DoUpdateH\x00\x12`\n\x1dresolve_nexus_operation_start\x18\x0f 
\x01(\x0b\x32\x37.coresdk.workflow_activation.ResolveNexusOperationStartH\x00\x12U\n\x17resolve_nexus_operation\x18\x10 \x01(\x0b\x32\x32.coresdk.workflow_activation.ResolveNexusOperationH\x00\x12I\n\x11remove_from_cache\x18\x32 \x01(\x0b\x32,.coresdk.workflow_activation.RemoveFromCacheH\x00\x42\t\n\x07variant"\xd9\n\n\x12InitializeWorkflow\x12\x15\n\rworkflow_type\x18\x01 \x01(\t\x12\x13\n\x0bworkflow_id\x18\x02 \x01(\t\x12\x32\n\targuments\x18\x03 \x03(\x0b\x32\x1f.temporal.api.common.v1.Payload\x12\x17\n\x0frandomness_seed\x18\x04 \x01(\x04\x12M\n\x07headers\x18\x05 \x03(\x0b\x32<.coresdk.workflow_activation.InitializeWorkflow.HeadersEntry\x12\x10\n\x08identity\x18\x06 \x01(\t\x12I\n\x14parent_workflow_info\x18\x07 \x01(\x0b\x32+.coresdk.common.NamespacedWorkflowExecution\x12=\n\x1aworkflow_execution_timeout\x18\x08 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x37\n\x14workflow_run_timeout\x18\t \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x38\n\x15workflow_task_timeout\x18\n \x01(\x0b\x32\x19.google.protobuf.Duration\x12\'\n\x1f\x63ontinued_from_execution_run_id\x18\x0b \x01(\t\x12J\n\x13\x63ontinued_initiator\x18\x0c \x01(\x0e\x32-.temporal.api.enums.v1.ContinueAsNewInitiator\x12;\n\x11\x63ontinued_failure\x18\r \x01(\x0b\x32 .temporal.api.failure.v1.Failure\x12@\n\x16last_completion_result\x18\x0e \x01(\x0b\x32 .temporal.api.common.v1.Payloads\x12\x1e\n\x16\x66irst_execution_run_id\x18\x0f \x01(\t\x12\x39\n\x0cretry_policy\x18\x10 \x01(\x0b\x32#.temporal.api.common.v1.RetryPolicy\x12\x0f\n\x07\x61ttempt\x18\x11 \x01(\x05\x12\x15\n\rcron_schedule\x18\x12 \x01(\t\x12\x46\n"workflow_execution_expiration_time\x18\x13 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x45\n"cron_schedule_to_schedule_interval\x18\x14 \x01(\x0b\x32\x19.google.protobuf.Duration\x12*\n\x04memo\x18\x15 \x01(\x0b\x32\x1c.temporal.api.common.v1.Memo\x12\x43\n\x11search_attributes\x18\x16 \x01(\x0b\x32(.temporal.api.common.v1.SearchAttributes\x12.\n\nstart_time\x18\x17 
\x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12@\n\rroot_workflow\x18\x18 \x01(\x0b\x32).temporal.api.common.v1.WorkflowExecution\x12\x32\n\x08priority\x18\x19 \x01(\x0b\x32 .temporal.api.common.v1.Priority\x1aO\n\x0cHeadersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12.\n\x05value\x18\x02 \x01(\x0b\x32\x1f.temporal.api.common.v1.Payload:\x02\x38\x01"\x18\n\tFireTimer\x12\x0b\n\x03seq\x18\x01 \x01(\r"m\n\x0fResolveActivity\x12\x0b\n\x03seq\x18\x01 \x01(\r\x12;\n\x06result\x18\x02 \x01(\x0b\x32+.coresdk.activity_result.ActivityResolution\x12\x10\n\x08is_local\x18\x03 \x01(\x08"\xd1\x02\n"ResolveChildWorkflowExecutionStart\x12\x0b\n\x03seq\x18\x01 \x01(\r\x12[\n\tsucceeded\x18\x02 \x01(\x0b\x32\x46.coresdk.workflow_activation.ResolveChildWorkflowExecutionStartSuccessH\x00\x12X\n\x06\x66\x61iled\x18\x03 \x01(\x0b\x32\x46.coresdk.workflow_activation.ResolveChildWorkflowExecutionStartFailureH\x00\x12]\n\tcancelled\x18\x04 \x01(\x0b\x32H.coresdk.workflow_activation.ResolveChildWorkflowExecutionStartCancelledH\x00\x42\x08\n\x06status";\n)ResolveChildWorkflowExecutionStartSuccess\x12\x0e\n\x06run_id\x18\x01 \x01(\t"\xa6\x01\n)ResolveChildWorkflowExecutionStartFailure\x12\x13\n\x0bworkflow_id\x18\x01 \x01(\t\x12\x15\n\rworkflow_type\x18\x02 \x01(\t\x12M\n\x05\x63\x61use\x18\x03 \x01(\x0e\x32>.coresdk.child_workflow.StartChildWorkflowExecutionFailedCause"`\n+ResolveChildWorkflowExecutionStartCancelled\x12\x31\n\x07\x66\x61ilure\x18\x01 \x01(\x0b\x32 .temporal.api.failure.v1.Failure"i\n\x1dResolveChildWorkflowExecution\x12\x0b\n\x03seq\x18\x01 \x01(\r\x12;\n\x06result\x18\x02 \x01(\x0b\x32+.coresdk.child_workflow.ChildWorkflowResult"+\n\x10UpdateRandomSeed\x12\x17\n\x0frandomness_seed\x18\x01 \x01(\x04"\x84\x02\n\rQueryWorkflow\x12\x10\n\x08query_id\x18\x01 \x01(\t\x12\x12\n\nquery_type\x18\x02 \x01(\t\x12\x32\n\targuments\x18\x03 \x03(\x0b\x32\x1f.temporal.api.common.v1.Payload\x12H\n\x07headers\x18\x05 
\x03(\x0b\x32\x37.coresdk.workflow_activation.QueryWorkflow.HeadersEntry\x1aO\n\x0cHeadersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12.\n\x05value\x18\x02 \x01(\x0b\x32\x1f.temporal.api.common.v1.Payload:\x02\x38\x01" \n\x0e\x43\x61ncelWorkflow\x12\x0e\n\x06reason\x18\x01 \x01(\t"\x83\x02\n\x0eSignalWorkflow\x12\x13\n\x0bsignal_name\x18\x01 \x01(\t\x12.\n\x05input\x18\x02 \x03(\x0b\x32\x1f.temporal.api.common.v1.Payload\x12\x10\n\x08identity\x18\x03 \x01(\t\x12I\n\x07headers\x18\x05 \x03(\x0b\x32\x38.coresdk.workflow_activation.SignalWorkflow.HeadersEntry\x1aO\n\x0cHeadersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12.\n\x05value\x18\x02 \x01(\x0b\x32\x1f.temporal.api.common.v1.Payload:\x02\x38\x01""\n\x0eNotifyHasPatch\x12\x10\n\x08patch_id\x18\x01 \x01(\t"_\n\x1dResolveSignalExternalWorkflow\x12\x0b\n\x03seq\x18\x01 \x01(\r\x12\x31\n\x07\x66\x61ilure\x18\x02 \x01(\x0b\x32 .temporal.api.failure.v1.Failure"f\n$ResolveRequestCancelExternalWorkflow\x12\x0b\n\x03seq\x18\x01 \x01(\r\x12\x31\n\x07\x66\x61ilure\x18\x02 \x01(\x0b\x32 .temporal.api.failure.v1.Failure"\xcb\x02\n\x08\x44oUpdate\x12\n\n\x02id\x18\x01 \x01(\t\x12\x1c\n\x14protocol_instance_id\x18\x02 \x01(\t\x12\x0c\n\x04name\x18\x03 \x01(\t\x12.\n\x05input\x18\x04 \x03(\x0b\x32\x1f.temporal.api.common.v1.Payload\x12\x43\n\x07headers\x18\x05 \x03(\x0b\x32\x32.coresdk.workflow_activation.DoUpdate.HeadersEntry\x12*\n\x04meta\x18\x06 \x01(\x0b\x32\x1c.temporal.api.update.v1.Meta\x12\x15\n\rrun_validator\x18\x07 \x01(\x08\x1aO\n\x0cHeadersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12.\n\x05value\x18\x02 \x01(\x0b\x32\x1f.temporal.api.common.v1.Payload:\x02\x38\x01"\xa7\x01\n\x1aResolveNexusOperationStart\x12\x0b\n\x03seq\x18\x01 \x01(\r\x12\x16\n\x0coperation_id\x18\x02 \x01(\tH\x00\x12\x16\n\x0cstarted_sync\x18\x03 \x01(\x08H\x00\x12\x42\n\x16\x63\x61ncelled_before_start\x18\x04 \x01(\x0b\x32 .temporal.api.failure.v1.FailureH\x00\x42\x08\n\x06status"Y\n\x15ResolveNexusOperation\x12\x0b\n\x03seq\x18\x01 
\x01(\r\x12\x33\n\x06result\x18\x02 \x01(\x0b\x32#.coresdk.nexus.NexusOperationResult"\xe0\x02\n\x0fRemoveFromCache\x12\x0f\n\x07message\x18\x01 \x01(\t\x12K\n\x06reason\x18\x02 \x01(\x0e\x32;.coresdk.workflow_activation.RemoveFromCache.EvictionReason"\xee\x01\n\x0e\x45victionReason\x12\x0f\n\x0bUNSPECIFIED\x10\x00\x12\x0e\n\nCACHE_FULL\x10\x01\x12\x0e\n\nCACHE_MISS\x10\x02\x12\x12\n\x0eNONDETERMINISM\x10\x03\x12\r\n\tLANG_FAIL\x10\x04\x12\x12\n\x0eLANG_REQUESTED\x10\x05\x12\x12\n\x0eTASK_NOT_FOUND\x10\x06\x12\x15\n\x11UNHANDLED_COMMAND\x10\x07\x12\t\n\x05\x46\x41TAL\x10\x08\x12\x1f\n\x1bPAGINATION_OR_HISTORY_FETCH\x10\t\x12\x1d\n\x19WORKFLOW_EXECUTION_ENDING\x10\nB8\xea\x02\x35Temporalio::Internal::Bridge::Api::WorkflowActivationb\x06proto3' + b'\n?temporal/sdk/core/workflow_activation/workflow_activation.proto\x12\x1b\x63oresdk.workflow_activation\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a%temporal/api/failure/v1/message.proto\x1a$temporal/api/update/v1/message.proto\x1a$temporal/api/common/v1/message.proto\x1a$temporal/api/enums/v1/workflow.proto\x1a\x37temporal/sdk/core/activity_result/activity_result.proto\x1a\x35temporal/sdk/core/child_workflow/child_workflow.proto\x1a%temporal/sdk/core/common/common.proto\x1a#temporal/sdk/core/nexus/nexus.proto"\xfa\x02\n\x12WorkflowActivation\x12\x0e\n\x06run_id\x18\x01 \x01(\t\x12-\n\ttimestamp\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x14\n\x0cis_replaying\x18\x03 \x01(\x08\x12\x16\n\x0ehistory_length\x18\x04 \x01(\r\x12@\n\x04jobs\x18\x05 \x03(\x0b\x32\x32.coresdk.workflow_activation.WorkflowActivationJob\x12 \n\x18\x61vailable_internal_flags\x18\x06 \x03(\r\x12\x1a\n\x12history_size_bytes\x18\x07 \x01(\x04\x12!\n\x19\x63ontinue_as_new_suggested\x18\x08 \x01(\x08\x12T\n#deployment_version_for_current_task\x18\t \x01(\x0b\x32\'.coresdk.common.WorkerDeploymentVersion"\xe0\n\n\x15WorkflowActivationJob\x12N\n\x13initialize_workflow\x18\x01 
\x01(\x0b\x32/.coresdk.workflow_activation.InitializeWorkflowH\x00\x12<\n\nfire_timer\x18\x02 \x01(\x0b\x32&.coresdk.workflow_activation.FireTimerH\x00\x12K\n\x12update_random_seed\x18\x04 \x01(\x0b\x32-.coresdk.workflow_activation.UpdateRandomSeedH\x00\x12\x44\n\x0equery_workflow\x18\x05 \x01(\x0b\x32*.coresdk.workflow_activation.QueryWorkflowH\x00\x12\x46\n\x0f\x63\x61ncel_workflow\x18\x06 \x01(\x0b\x32+.coresdk.workflow_activation.CancelWorkflowH\x00\x12\x46\n\x0fsignal_workflow\x18\x07 \x01(\x0b\x32+.coresdk.workflow_activation.SignalWorkflowH\x00\x12H\n\x10resolve_activity\x18\x08 \x01(\x0b\x32,.coresdk.workflow_activation.ResolveActivityH\x00\x12G\n\x10notify_has_patch\x18\t \x01(\x0b\x32+.coresdk.workflow_activation.NotifyHasPatchH\x00\x12q\n&resolve_child_workflow_execution_start\x18\n \x01(\x0b\x32?.coresdk.workflow_activation.ResolveChildWorkflowExecutionStartH\x00\x12\x66\n resolve_child_workflow_execution\x18\x0b \x01(\x0b\x32:.coresdk.workflow_activation.ResolveChildWorkflowExecutionH\x00\x12\x66\n resolve_signal_external_workflow\x18\x0c \x01(\x0b\x32:.coresdk.workflow_activation.ResolveSignalExternalWorkflowH\x00\x12u\n(resolve_request_cancel_external_workflow\x18\r \x01(\x0b\x32\x41.coresdk.workflow_activation.ResolveRequestCancelExternalWorkflowH\x00\x12:\n\tdo_update\x18\x0e \x01(\x0b\x32%.coresdk.workflow_activation.DoUpdateH\x00\x12`\n\x1dresolve_nexus_operation_start\x18\x0f \x01(\x0b\x32\x37.coresdk.workflow_activation.ResolveNexusOperationStartH\x00\x12U\n\x17resolve_nexus_operation\x18\x10 \x01(\x0b\x32\x32.coresdk.workflow_activation.ResolveNexusOperationH\x00\x12I\n\x11remove_from_cache\x18\x32 \x01(\x0b\x32,.coresdk.workflow_activation.RemoveFromCacheH\x00\x42\t\n\x07variant"\xd9\n\n\x12InitializeWorkflow\x12\x15\n\rworkflow_type\x18\x01 \x01(\t\x12\x13\n\x0bworkflow_id\x18\x02 \x01(\t\x12\x32\n\targuments\x18\x03 \x03(\x0b\x32\x1f.temporal.api.common.v1.Payload\x12\x17\n\x0frandomness_seed\x18\x04 \x01(\x04\x12M\n\x07headers\x18\x05 
\x03(\x0b\x32<.coresdk.workflow_activation.InitializeWorkflow.HeadersEntry\x12\x10\n\x08identity\x18\x06 \x01(\t\x12I\n\x14parent_workflow_info\x18\x07 \x01(\x0b\x32+.coresdk.common.NamespacedWorkflowExecution\x12=\n\x1aworkflow_execution_timeout\x18\x08 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x37\n\x14workflow_run_timeout\x18\t \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x38\n\x15workflow_task_timeout\x18\n \x01(\x0b\x32\x19.google.protobuf.Duration\x12\'\n\x1f\x63ontinued_from_execution_run_id\x18\x0b \x01(\t\x12J\n\x13\x63ontinued_initiator\x18\x0c \x01(\x0e\x32-.temporal.api.enums.v1.ContinueAsNewInitiator\x12;\n\x11\x63ontinued_failure\x18\r \x01(\x0b\x32 .temporal.api.failure.v1.Failure\x12@\n\x16last_completion_result\x18\x0e \x01(\x0b\x32 .temporal.api.common.v1.Payloads\x12\x1e\n\x16\x66irst_execution_run_id\x18\x0f \x01(\t\x12\x39\n\x0cretry_policy\x18\x10 \x01(\x0b\x32#.temporal.api.common.v1.RetryPolicy\x12\x0f\n\x07\x61ttempt\x18\x11 \x01(\x05\x12\x15\n\rcron_schedule\x18\x12 \x01(\t\x12\x46\n"workflow_execution_expiration_time\x18\x13 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x45\n"cron_schedule_to_schedule_interval\x18\x14 \x01(\x0b\x32\x19.google.protobuf.Duration\x12*\n\x04memo\x18\x15 \x01(\x0b\x32\x1c.temporal.api.common.v1.Memo\x12\x43\n\x11search_attributes\x18\x16 \x01(\x0b\x32(.temporal.api.common.v1.SearchAttributes\x12.\n\nstart_time\x18\x17 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12@\n\rroot_workflow\x18\x18 \x01(\x0b\x32).temporal.api.common.v1.WorkflowExecution\x12\x32\n\x08priority\x18\x19 \x01(\x0b\x32 .temporal.api.common.v1.Priority\x1aO\n\x0cHeadersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12.\n\x05value\x18\x02 \x01(\x0b\x32\x1f.temporal.api.common.v1.Payload:\x02\x38\x01"\x18\n\tFireTimer\x12\x0b\n\x03seq\x18\x01 \x01(\r"m\n\x0fResolveActivity\x12\x0b\n\x03seq\x18\x01 \x01(\r\x12;\n\x06result\x18\x02 \x01(\x0b\x32+.coresdk.activity_result.ActivityResolution\x12\x10\n\x08is_local\x18\x03 
\x01(\x08"\xd1\x02\n"ResolveChildWorkflowExecutionStart\x12\x0b\n\x03seq\x18\x01 \x01(\r\x12[\n\tsucceeded\x18\x02 \x01(\x0b\x32\x46.coresdk.workflow_activation.ResolveChildWorkflowExecutionStartSuccessH\x00\x12X\n\x06\x66\x61iled\x18\x03 \x01(\x0b\x32\x46.coresdk.workflow_activation.ResolveChildWorkflowExecutionStartFailureH\x00\x12]\n\tcancelled\x18\x04 \x01(\x0b\x32H.coresdk.workflow_activation.ResolveChildWorkflowExecutionStartCancelledH\x00\x42\x08\n\x06status";\n)ResolveChildWorkflowExecutionStartSuccess\x12\x0e\n\x06run_id\x18\x01 \x01(\t"\xa6\x01\n)ResolveChildWorkflowExecutionStartFailure\x12\x13\n\x0bworkflow_id\x18\x01 \x01(\t\x12\x15\n\rworkflow_type\x18\x02 \x01(\t\x12M\n\x05\x63\x61use\x18\x03 \x01(\x0e\x32>.coresdk.child_workflow.StartChildWorkflowExecutionFailedCause"`\n+ResolveChildWorkflowExecutionStartCancelled\x12\x31\n\x07\x66\x61ilure\x18\x01 \x01(\x0b\x32 .temporal.api.failure.v1.Failure"i\n\x1dResolveChildWorkflowExecution\x12\x0b\n\x03seq\x18\x01 \x01(\r\x12;\n\x06result\x18\x02 \x01(\x0b\x32+.coresdk.child_workflow.ChildWorkflowResult"+\n\x10UpdateRandomSeed\x12\x17\n\x0frandomness_seed\x18\x01 \x01(\x04"\x84\x02\n\rQueryWorkflow\x12\x10\n\x08query_id\x18\x01 \x01(\t\x12\x12\n\nquery_type\x18\x02 \x01(\t\x12\x32\n\targuments\x18\x03 \x03(\x0b\x32\x1f.temporal.api.common.v1.Payload\x12H\n\x07headers\x18\x05 \x03(\x0b\x32\x37.coresdk.workflow_activation.QueryWorkflow.HeadersEntry\x1aO\n\x0cHeadersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12.\n\x05value\x18\x02 \x01(\x0b\x32\x1f.temporal.api.common.v1.Payload:\x02\x38\x01" \n\x0e\x43\x61ncelWorkflow\x12\x0e\n\x06reason\x18\x01 \x01(\t"\x83\x02\n\x0eSignalWorkflow\x12\x13\n\x0bsignal_name\x18\x01 \x01(\t\x12.\n\x05input\x18\x02 \x03(\x0b\x32\x1f.temporal.api.common.v1.Payload\x12\x10\n\x08identity\x18\x03 \x01(\t\x12I\n\x07headers\x18\x05 \x03(\x0b\x32\x38.coresdk.workflow_activation.SignalWorkflow.HeadersEntry\x1aO\n\x0cHeadersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12.\n\x05value\x18\x02 
\x01(\x0b\x32\x1f.temporal.api.common.v1.Payload:\x02\x38\x01""\n\x0eNotifyHasPatch\x12\x10\n\x08patch_id\x18\x01 \x01(\t"_\n\x1dResolveSignalExternalWorkflow\x12\x0b\n\x03seq\x18\x01 \x01(\r\x12\x31\n\x07\x66\x61ilure\x18\x02 \x01(\x0b\x32 .temporal.api.failure.v1.Failure"f\n$ResolveRequestCancelExternalWorkflow\x12\x0b\n\x03seq\x18\x01 \x01(\r\x12\x31\n\x07\x66\x61ilure\x18\x02 \x01(\x0b\x32 .temporal.api.failure.v1.Failure"\xcb\x02\n\x08\x44oUpdate\x12\n\n\x02id\x18\x01 \x01(\t\x12\x1c\n\x14protocol_instance_id\x18\x02 \x01(\t\x12\x0c\n\x04name\x18\x03 \x01(\t\x12.\n\x05input\x18\x04 \x03(\x0b\x32\x1f.temporal.api.common.v1.Payload\x12\x43\n\x07headers\x18\x05 \x03(\x0b\x32\x32.coresdk.workflow_activation.DoUpdate.HeadersEntry\x12*\n\x04meta\x18\x06 \x01(\x0b\x32\x1c.temporal.api.update.v1.Meta\x12\x15\n\rrun_validator\x18\x07 \x01(\x08\x1aO\n\x0cHeadersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12.\n\x05value\x18\x02 \x01(\x0b\x32\x1f.temporal.api.common.v1.Payload:\x02\x38\x01"\xa7\x01\n\x1aResolveNexusOperationStart\x12\x0b\n\x03seq\x18\x01 \x01(\r\x12\x16\n\x0coperation_id\x18\x02 \x01(\tH\x00\x12\x16\n\x0cstarted_sync\x18\x03 \x01(\x08H\x00\x12\x42\n\x16\x63\x61ncelled_before_start\x18\x04 \x01(\x0b\x32 .temporal.api.failure.v1.FailureH\x00\x42\x08\n\x06status"Y\n\x15ResolveNexusOperation\x12\x0b\n\x03seq\x18\x01 \x01(\r\x12\x33\n\x06result\x18\x02 \x01(\x0b\x32#.coresdk.nexus.NexusOperationResult"\xe0\x02\n\x0fRemoveFromCache\x12\x0f\n\x07message\x18\x01 \x01(\t\x12K\n\x06reason\x18\x02 
\x01(\x0e\x32;.coresdk.workflow_activation.RemoveFromCache.EvictionReason"\xee\x01\n\x0e\x45victionReason\x12\x0f\n\x0bUNSPECIFIED\x10\x00\x12\x0e\n\nCACHE_FULL\x10\x01\x12\x0e\n\nCACHE_MISS\x10\x02\x12\x12\n\x0eNONDETERMINISM\x10\x03\x12\r\n\tLANG_FAIL\x10\x04\x12\x12\n\x0eLANG_REQUESTED\x10\x05\x12\x12\n\x0eTASK_NOT_FOUND\x10\x06\x12\x15\n\x11UNHANDLED_COMMAND\x10\x07\x12\t\n\x05\x46\x41TAL\x10\x08\x12\x1f\n\x1bPAGINATION_OR_HISTORY_FETCH\x10\t\x12\x1d\n\x19WORKFLOW_EXECUTION_ENDING\x10\nB8\xea\x02\x35Temporalio::Internal::Bridge::Api::WorkflowActivationb\x06proto3' ) @@ -377,55 +377,55 @@ _DOUPDATE_HEADERSENTRY._options = None _DOUPDATE_HEADERSENTRY._serialized_options = b"8\001" _WORKFLOWACTIVATION._serialized_start = 532 - _WORKFLOWACTIVATION._serialized_end = 859 - _WORKFLOWACTIVATIONJOB._serialized_start = 862 - _WORKFLOWACTIVATIONJOB._serialized_end = 2238 - _INITIALIZEWORKFLOW._serialized_start = 2241 - _INITIALIZEWORKFLOW._serialized_end = 3610 - _INITIALIZEWORKFLOW_HEADERSENTRY._serialized_start = 3531 - _INITIALIZEWORKFLOW_HEADERSENTRY._serialized_end = 3610 - _FIRETIMER._serialized_start = 3612 - _FIRETIMER._serialized_end = 3636 - _RESOLVEACTIVITY._serialized_start = 3638 - _RESOLVEACTIVITY._serialized_end = 3747 - _RESOLVECHILDWORKFLOWEXECUTIONSTART._serialized_start = 3750 - _RESOLVECHILDWORKFLOWEXECUTIONSTART._serialized_end = 4087 - _RESOLVECHILDWORKFLOWEXECUTIONSTARTSUCCESS._serialized_start = 4089 - _RESOLVECHILDWORKFLOWEXECUTIONSTARTSUCCESS._serialized_end = 4148 - _RESOLVECHILDWORKFLOWEXECUTIONSTARTFAILURE._serialized_start = 4151 - _RESOLVECHILDWORKFLOWEXECUTIONSTARTFAILURE._serialized_end = 4317 - _RESOLVECHILDWORKFLOWEXECUTIONSTARTCANCELLED._serialized_start = 4319 - _RESOLVECHILDWORKFLOWEXECUTIONSTARTCANCELLED._serialized_end = 4415 - _RESOLVECHILDWORKFLOWEXECUTION._serialized_start = 4417 - _RESOLVECHILDWORKFLOWEXECUTION._serialized_end = 4522 - _UPDATERANDOMSEED._serialized_start = 4524 - _UPDATERANDOMSEED._serialized_end = 4567 - 
_QUERYWORKFLOW._serialized_start = 4570 - _QUERYWORKFLOW._serialized_end = 4830 - _QUERYWORKFLOW_HEADERSENTRY._serialized_start = 3531 - _QUERYWORKFLOW_HEADERSENTRY._serialized_end = 3610 - _CANCELWORKFLOW._serialized_start = 4832 - _CANCELWORKFLOW._serialized_end = 4864 - _SIGNALWORKFLOW._serialized_start = 4867 - _SIGNALWORKFLOW._serialized_end = 5126 - _SIGNALWORKFLOW_HEADERSENTRY._serialized_start = 3531 - _SIGNALWORKFLOW_HEADERSENTRY._serialized_end = 3610 - _NOTIFYHASPATCH._serialized_start = 5128 - _NOTIFYHASPATCH._serialized_end = 5162 - _RESOLVESIGNALEXTERNALWORKFLOW._serialized_start = 5164 - _RESOLVESIGNALEXTERNALWORKFLOW._serialized_end = 5259 - _RESOLVEREQUESTCANCELEXTERNALWORKFLOW._serialized_start = 5261 - _RESOLVEREQUESTCANCELEXTERNALWORKFLOW._serialized_end = 5363 - _DOUPDATE._serialized_start = 5366 - _DOUPDATE._serialized_end = 5697 - _DOUPDATE_HEADERSENTRY._serialized_start = 3531 - _DOUPDATE_HEADERSENTRY._serialized_end = 3610 - _RESOLVENEXUSOPERATIONSTART._serialized_start = 5700 - _RESOLVENEXUSOPERATIONSTART._serialized_end = 5867 - _RESOLVENEXUSOPERATION._serialized_start = 5869 - _RESOLVENEXUSOPERATION._serialized_end = 5958 - _REMOVEFROMCACHE._serialized_start = 5961 - _REMOVEFROMCACHE._serialized_end = 6313 - _REMOVEFROMCACHE_EVICTIONREASON._serialized_start = 6075 - _REMOVEFROMCACHE_EVICTIONREASON._serialized_end = 6313 + _WORKFLOWACTIVATION._serialized_end = 910 + _WORKFLOWACTIVATIONJOB._serialized_start = 913 + _WORKFLOWACTIVATIONJOB._serialized_end = 2289 + _INITIALIZEWORKFLOW._serialized_start = 2292 + _INITIALIZEWORKFLOW._serialized_end = 3661 + _INITIALIZEWORKFLOW_HEADERSENTRY._serialized_start = 3582 + _INITIALIZEWORKFLOW_HEADERSENTRY._serialized_end = 3661 + _FIRETIMER._serialized_start = 3663 + _FIRETIMER._serialized_end = 3687 + _RESOLVEACTIVITY._serialized_start = 3689 + _RESOLVEACTIVITY._serialized_end = 3798 + _RESOLVECHILDWORKFLOWEXECUTIONSTART._serialized_start = 3801 + _RESOLVECHILDWORKFLOWEXECUTIONSTART._serialized_end = 
4138 + _RESOLVECHILDWORKFLOWEXECUTIONSTARTSUCCESS._serialized_start = 4140 + _RESOLVECHILDWORKFLOWEXECUTIONSTARTSUCCESS._serialized_end = 4199 + _RESOLVECHILDWORKFLOWEXECUTIONSTARTFAILURE._serialized_start = 4202 + _RESOLVECHILDWORKFLOWEXECUTIONSTARTFAILURE._serialized_end = 4368 + _RESOLVECHILDWORKFLOWEXECUTIONSTARTCANCELLED._serialized_start = 4370 + _RESOLVECHILDWORKFLOWEXECUTIONSTARTCANCELLED._serialized_end = 4466 + _RESOLVECHILDWORKFLOWEXECUTION._serialized_start = 4468 + _RESOLVECHILDWORKFLOWEXECUTION._serialized_end = 4573 + _UPDATERANDOMSEED._serialized_start = 4575 + _UPDATERANDOMSEED._serialized_end = 4618 + _QUERYWORKFLOW._serialized_start = 4621 + _QUERYWORKFLOW._serialized_end = 4881 + _QUERYWORKFLOW_HEADERSENTRY._serialized_start = 3582 + _QUERYWORKFLOW_HEADERSENTRY._serialized_end = 3661 + _CANCELWORKFLOW._serialized_start = 4883 + _CANCELWORKFLOW._serialized_end = 4915 + _SIGNALWORKFLOW._serialized_start = 4918 + _SIGNALWORKFLOW._serialized_end = 5177 + _SIGNALWORKFLOW_HEADERSENTRY._serialized_start = 3582 + _SIGNALWORKFLOW_HEADERSENTRY._serialized_end = 3661 + _NOTIFYHASPATCH._serialized_start = 5179 + _NOTIFYHASPATCH._serialized_end = 5213 + _RESOLVESIGNALEXTERNALWORKFLOW._serialized_start = 5215 + _RESOLVESIGNALEXTERNALWORKFLOW._serialized_end = 5310 + _RESOLVEREQUESTCANCELEXTERNALWORKFLOW._serialized_start = 5312 + _RESOLVEREQUESTCANCELEXTERNALWORKFLOW._serialized_end = 5414 + _DOUPDATE._serialized_start = 5417 + _DOUPDATE._serialized_end = 5748 + _DOUPDATE_HEADERSENTRY._serialized_start = 3582 + _DOUPDATE_HEADERSENTRY._serialized_end = 3661 + _RESOLVENEXUSOPERATIONSTART._serialized_start = 5751 + _RESOLVENEXUSOPERATIONSTART._serialized_end = 5918 + _RESOLVENEXUSOPERATION._serialized_start = 5920 + _RESOLVENEXUSOPERATION._serialized_end = 6009 + _REMOVEFROMCACHE._serialized_start = 6012 + _REMOVEFROMCACHE._serialized_end = 6364 + _REMOVEFROMCACHE_EVICTIONREASON._serialized_start = 6126 + _REMOVEFROMCACHE_EVICTIONREASON._serialized_end = 6364 # 
@@protoc_insertion_point(module_scope) diff --git a/temporalio/bridge/proto/workflow_activation/workflow_activation_pb2.pyi b/temporalio/bridge/proto/workflow_activation/workflow_activation_pb2.pyi index 5eb734a1b..e042672b9 100644 --- a/temporalio/bridge/proto/workflow_activation/workflow_activation_pb2.pyi +++ b/temporalio/bridge/proto/workflow_activation/workflow_activation_pb2.pyi @@ -53,15 +53,15 @@ class WorkflowActivation(google.protobuf.message.Message): This is because: * Patches are expected to apply to the entire activation * Signal and update handlers should be invoked before workflow routines are iterated. That is to - say before the users' main workflow function and anything spawned by it is allowed to continue. + say before the users' main workflow function and anything spawned by it is allowed to continue. * Local activities resolutions go after other normal jobs because while *not* replaying, they - will always take longer than anything else that produces an immediate job (which is - effectively instant). When *replaying* we need to scan ahead for LA markers so that we can - resolve them in the same activation that they completed in when not replaying. However, doing - so would, by default, put those resolutions *before* any other immediate jobs that happened - in that same activation (prime example: cancelling not-wait-for-cancel activities). So, we do - this to ensure the LA resolution happens after that cancel (or whatever else it may be) as it - normally would have when executing. + will always take longer than anything else that produces an immediate job (which is + effectively instant). When *replaying* we need to scan ahead for LA markers so that we can + resolve them in the same activation that they completed in when not replaying. However, doing + so would, by default, put those resolutions *before* any other immediate jobs that happened + in that same activation (prime example: cancelling not-wait-for-cancel activities). 
So, we do + this to ensure the LA resolution happens after that cancel (or whatever else it may be) as it + normally would have when executing. * Queries always go last (and, in fact, always come in their own activation) * Evictions also always come in their own activation @@ -91,7 +91,7 @@ class WorkflowActivation(google.protobuf.message.Message): AVAILABLE_INTERNAL_FLAGS_FIELD_NUMBER: builtins.int HISTORY_SIZE_BYTES_FIELD_NUMBER: builtins.int CONTINUE_AS_NEW_SUGGESTED_FIELD_NUMBER: builtins.int - BUILD_ID_FOR_CURRENT_TASK_FIELD_NUMBER: builtins.int + DEPLOYMENT_VERSION_FOR_CURRENT_TASK_FIELD_NUMBER: builtins.int run_id: builtins.str """The id of the currently active run of the workflow. Also used as a cache key. There may only ever be one active workflow task (and hence activation) of a run at one time. @@ -124,11 +124,18 @@ class WorkflowActivation(google.protobuf.message.Message): """The history size in bytes as of the last WFT started event""" continue_as_new_suggested: builtins.bool """Set true if the most recent WFT started event had this suggestion""" - build_id_for_current_task: builtins.str - """Set to the Build ID of the worker that processed this task, which may be empty. During replay - this id may not equal the id of the replaying worker. If not replaying and this worker has - a defined Build ID, it will equal that ID. It will also be empty for evict-only activations. - """ + @property + def deployment_version_for_current_task( + self, + ) -> temporalio.bridge.proto.common.common_pb2.WorkerDeploymentVersion: + """Set to the deployment version of the worker that processed this task, + which may be empty. During replay this version may not equal the version + of the replaying worker. If not replaying and this worker has a defined + Deployment Version, it will equal that. It will also be empty for + evict-only activations. The deployment name may be empty, but not the + build id, if this worker was using the deprecated Build ID-only + feature(s). 
+ """ def __init__( self, *, @@ -140,20 +147,27 @@ class WorkflowActivation(google.protobuf.message.Message): available_internal_flags: collections.abc.Iterable[builtins.int] | None = ..., history_size_bytes: builtins.int = ..., continue_as_new_suggested: builtins.bool = ..., - build_id_for_current_task: builtins.str = ..., + deployment_version_for_current_task: temporalio.bridge.proto.common.common_pb2.WorkerDeploymentVersion + | None = ..., ) -> None: ... def HasField( - self, field_name: typing_extensions.Literal["timestamp", b"timestamp"] + self, + field_name: typing_extensions.Literal[ + "deployment_version_for_current_task", + b"deployment_version_for_current_task", + "timestamp", + b"timestamp", + ], ) -> builtins.bool: ... def ClearField( self, field_name: typing_extensions.Literal[ "available_internal_flags", b"available_internal_flags", - "build_id_for_current_task", - b"build_id_for_current_task", "continue_as_new_suggested", b"continue_as_new_suggested", + "deployment_version_for_current_task", + b"deployment_version_for_current_task", "history_length", b"history_length", "history_size_bytes", diff --git a/temporalio/bridge/proto/workflow_completion/workflow_completion_pb2.py b/temporalio/bridge/proto/workflow_completion/workflow_completion_pb2.py index 5b162cce5..ce26b220d 100644 --- a/temporalio/bridge/proto/workflow_completion/workflow_completion_pb2.py +++ b/temporalio/bridge/proto/workflow_completion/workflow_completion_pb2.py @@ -17,6 +17,9 @@ from temporalio.api.enums.v1 import ( failed_cause_pb2 as temporal_dot_api_dot_enums_dot_v1_dot_failed__cause__pb2, ) +from temporalio.api.enums.v1 import ( + workflow_pb2 as temporal_dot_api_dot_enums_dot_v1_dot_workflow__pb2, +) from temporalio.api.failure.v1 import ( message_pb2 as temporal_dot_api_dot_failure_dot_v1_dot_message__pb2, ) @@ -28,7 +31,7 @@ ) DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( - 
b'\n?temporal/sdk/core/workflow_completion/workflow_completion.proto\x12\x1b\x63oresdk.workflow_completion\x1a%temporal/api/failure/v1/message.proto\x1a(temporal/api/enums/v1/failed_cause.proto\x1a%temporal/sdk/core/common/common.proto\x1a;temporal/sdk/core/workflow_commands/workflow_commands.proto"\xac\x01\n\x1cWorkflowActivationCompletion\x12\x0e\n\x06run_id\x18\x01 \x01(\t\x12:\n\nsuccessful\x18\x02 \x01(\x0b\x32$.coresdk.workflow_completion.SuccessH\x00\x12\x36\n\x06\x66\x61iled\x18\x03 \x01(\x0b\x32$.coresdk.workflow_completion.FailureH\x00\x42\x08\n\x06status"d\n\x07Success\x12<\n\x08\x63ommands\x18\x01 \x03(\x0b\x32*.coresdk.workflow_commands.WorkflowCommand\x12\x1b\n\x13used_internal_flags\x18\x06 \x03(\r"\x81\x01\n\x07\x46\x61ilure\x12\x31\n\x07\x66\x61ilure\x18\x01 \x01(\x0b\x32 .temporal.api.failure.v1.Failure\x12\x43\n\x0b\x66orce_cause\x18\x02 \x01(\x0e\x32..temporal.api.enums.v1.WorkflowTaskFailedCauseB8\xea\x02\x35Temporalio::Internal::Bridge::Api::WorkflowCompletionb\x06proto3' + b'\n?temporal/sdk/core/workflow_completion/workflow_completion.proto\x12\x1b\x63oresdk.workflow_completion\x1a%temporal/api/failure/v1/message.proto\x1a(temporal/api/enums/v1/failed_cause.proto\x1a$temporal/api/enums/v1/workflow.proto\x1a%temporal/sdk/core/common/common.proto\x1a;temporal/sdk/core/workflow_commands/workflow_commands.proto"\xac\x01\n\x1cWorkflowActivationCompletion\x12\x0e\n\x06run_id\x18\x01 \x01(\t\x12:\n\nsuccessful\x18\x02 \x01(\x0b\x32$.coresdk.workflow_completion.SuccessH\x00\x12\x36\n\x06\x66\x61iled\x18\x03 \x01(\x0b\x32$.coresdk.workflow_completion.FailureH\x00\x42\x08\n\x06status"\xac\x01\n\x07Success\x12<\n\x08\x63ommands\x18\x01 \x03(\x0b\x32*.coresdk.workflow_commands.WorkflowCommand\x12\x1b\n\x13used_internal_flags\x18\x06 \x03(\r\x12\x46\n\x13versioning_behavior\x18\x07 \x01(\x0e\x32).temporal.api.enums.v1.VersioningBehavior"\x81\x01\n\x07\x46\x61ilure\x12\x31\n\x07\x66\x61ilure\x18\x01 \x01(\x0b\x32 
.temporal.api.failure.v1.Failure\x12\x43\n\x0b\x66orce_cause\x18\x02 \x01(\x0e\x32..temporal.api.enums.v1.WorkflowTaskFailedCauseB8\xea\x02\x35Temporalio::Internal::Bridge::Api::WorkflowCompletionb\x06proto3' ) @@ -75,10 +78,10 @@ DESCRIPTOR._serialized_options = ( b"\352\0025Temporalio::Internal::Bridge::Api::WorkflowCompletion" ) - _WORKFLOWACTIVATIONCOMPLETION._serialized_start = 278 - _WORKFLOWACTIVATIONCOMPLETION._serialized_end = 450 - _SUCCESS._serialized_start = 452 - _SUCCESS._serialized_end = 552 - _FAILURE._serialized_start = 555 - _FAILURE._serialized_end = 684 + _WORKFLOWACTIVATIONCOMPLETION._serialized_start = 316 + _WORKFLOWACTIVATIONCOMPLETION._serialized_end = 488 + _SUCCESS._serialized_start = 491 + _SUCCESS._serialized_end = 663 + _FAILURE._serialized_start = 666 + _FAILURE._serialized_end = 795 # @@protoc_insertion_point(module_scope) diff --git a/temporalio/bridge/proto/workflow_completion/workflow_completion_pb2.pyi b/temporalio/bridge/proto/workflow_completion/workflow_completion_pb2.pyi index 5568b9eef..5b438f360 100644 --- a/temporalio/bridge/proto/workflow_completion/workflow_completion_pb2.pyi +++ b/temporalio/bridge/proto/workflow_completion/workflow_completion_pb2.pyi @@ -12,6 +12,7 @@ import google.protobuf.internal.containers import google.protobuf.message import temporalio.api.enums.v1.failed_cause_pb2 +import temporalio.api.enums.v1.workflow_pb2 import temporalio.api.failure.v1.message_pb2 import temporalio.bridge.proto.workflow_commands.workflow_commands_pb2 @@ -75,6 +76,7 @@ class Success(google.protobuf.message.Message): COMMANDS_FIELD_NUMBER: builtins.int USED_INTERNAL_FLAGS_FIELD_NUMBER: builtins.int + VERSIONING_BEHAVIOR_FIELD_NUMBER: builtins.int @property def commands( self, @@ -87,6 +89,10 @@ class Success(google.protobuf.message.Message): self, ) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: """Any internal flags which the lang SDK used in the processing of this activation""" + 
versioning_behavior: ( + temporalio.api.enums.v1.workflow_pb2.VersioningBehavior.ValueType + ) + """The versioning behavior this workflow is currently using""" def __init__( self, *, @@ -95,11 +101,17 @@ class Success(google.protobuf.message.Message): ] | None = ..., used_internal_flags: collections.abc.Iterable[builtins.int] | None = ..., + versioning_behavior: temporalio.api.enums.v1.workflow_pb2.VersioningBehavior.ValueType = ..., ) -> None: ... def ClearField( self, field_name: typing_extensions.Literal[ - "commands", b"commands", "used_internal_flags", b"used_internal_flags" + "commands", + b"commands", + "used_internal_flags", + b"used_internal_flags", + "versioning_behavior", + b"versioning_behavior", ], ) -> None: ... diff --git a/temporalio/bridge/sdk-core b/temporalio/bridge/sdk-core index 95db75dc9..e18982ec7 160000 --- a/temporalio/bridge/sdk-core +++ b/temporalio/bridge/sdk-core @@ -1 +1 @@ -Subproject commit 95db75dc950cf07a99c79e6794172572dd34e6a6 +Subproject commit e18982ec72be62e357a5ea418b1670c8b2fee55f diff --git a/temporalio/bridge/src/client.rs b/temporalio/bridge/src/client.rs index 2e5866186..83c7f79fc 100644 --- a/temporalio/bridge/src/client.rs +++ b/temporalio/bridge/src/client.rs @@ -139,18 +139,40 @@ impl ClientRef { "delete_schedule" => { rpc_call!(retry_client, call, delete_schedule) } + "delete_worker_deployment" => { + rpc_call!(retry_client, call, delete_worker_deployment) + } + "delete_worker_deployment_version" => { + rpc_call!(retry_client, call, delete_worker_deployment_version) + } "delete_workflow_execution" => { rpc_call!(retry_client, call, delete_workflow_execution) } + "describe_batch_operation" => { + rpc_call!(retry_client, call, describe_batch_operation) + } + "describe_deployment" => { + rpc_call!(retry_client, call, describe_deployment) + } "deprecate_namespace" => rpc_call!(retry_client, call, deprecate_namespace), "describe_namespace" => rpc_call!(retry_client, call, describe_namespace), "describe_schedule" => 
rpc_call!(retry_client, call, describe_schedule), "describe_task_queue" => rpc_call!(retry_client, call, describe_task_queue), + "describe_worker_deployment" => { + rpc_call!(retry_client, call, describe_worker_deployment) + } + "describe_worker_deployment_version" => { + rpc_call!(retry_client, call, describe_worker_deployment_version) + } "describe_workflow_execution" => { rpc_call!(retry_client, call, describe_workflow_execution) } "execute_multi_operation" => rpc_call!(retry_client, call, execute_multi_operation), "get_cluster_info" => rpc_call!(retry_client, call, get_cluster_info), + "get_current_deployment" => rpc_call!(retry_client, call, get_current_deployment), + "get_deployment_reachability" => { + rpc_call!(retry_client, call, get_deployment_reachability) + } "get_search_attributes" => { rpc_call!(retry_client, call, get_search_attributes) } @@ -176,6 +198,9 @@ impl ClientRef { "list_closed_workflow_executions" => { rpc_call!(retry_client, call, list_closed_workflow_executions) } + "list_deployments" => { + rpc_call!(retry_client, call, list_deployments) + } "list_namespaces" => rpc_call!(retry_client, call, list_namespaces), "list_open_workflow_executions" => { rpc_call!(retry_client, call, list_open_workflow_executions) @@ -189,6 +214,9 @@ impl ClientRef { "list_task_queue_partitions" => { rpc_call!(retry_client, call, list_task_queue_partitions) } + "list_worker_deployments" => { + rpc_call!(retry_client, call, list_worker_deployments) + } "list_workflow_executions" => { rpc_call!(retry_client, call, list_workflow_executions) } @@ -258,6 +286,18 @@ impl ClientRef { "scan_workflow_executions" => { rpc_call!(retry_client, call, scan_workflow_executions) } + "set_current_deployment" => { + rpc_call!(retry_client, call, set_current_deployment) + } + "set_worker_deployment_current_version" => { + rpc_call!(retry_client, call, set_worker_deployment_current_version) + } + "set_worker_deployment_ramping_version" => { + rpc_call!(retry_client, call, 
set_worker_deployment_ramping_version) + } + "shutdown_worker" => { + rpc_call!(retry_client, call, shutdown_worker) + } "signal_with_start_workflow_execution" => { rpc_call!(retry_client, call, signal_with_start_workflow_execution) } @@ -274,6 +314,13 @@ impl ClientRef { rpc_call_on_trait!(retry_client, call, WorkflowService, update_namespace) } "update_schedule" => rpc_call!(retry_client, call, update_schedule), + "update_worker_deployment_version_metadata" => { + rpc_call!( + retry_client, + call, + update_worker_deployment_version_metadata + ) + } "update_workflow_execution" => { rpc_call!(retry_client, call, update_workflow_execution) } diff --git a/temporalio/bridge/src/worker.rs b/temporalio/bridge/src/worker.rs index 3f8baa5e7..d9a6487b6 100644 --- a/temporalio/bridge/src/worker.rs +++ b/temporalio/bridge/src/worker.rs @@ -5,7 +5,7 @@ use log::error; use prost::Message; use pyo3::exceptions::{PyException, PyRuntimeError, PyValueError}; use pyo3::prelude::*; -use pyo3::types::{PyBytes, PyTuple}; +use pyo3::types::{PyBytes, PyDict, PyTuple}; use std::collections::HashMap; use std::collections::HashSet; use std::marker::PhantomData; @@ -15,8 +15,9 @@ use temporal_sdk_core::api::errors::PollError; use temporal_sdk_core::replay::{HistoryForReplay, ReplayWorkerInput}; use temporal_sdk_core_api::errors::WorkflowErrorType; use temporal_sdk_core_api::worker::{ - SlotInfo, SlotInfoTrait, SlotKind, SlotKindType, SlotMarkUsedContext, SlotReleaseContext, - SlotReservationContext, SlotSupplier as SlotSupplierTrait, SlotSupplierPermit, + PollerBehavior, SlotInfo, SlotInfoTrait, SlotKind, SlotKindType, SlotMarkUsedContext, + SlotReleaseContext, SlotReservationContext, SlotSupplier as SlotSupplierTrait, + SlotSupplierPermit, }; use temporal_sdk_core_api::Worker; use temporal_sdk_core_protos::coresdk::workflow_completion::WorkflowActivationCompletion; @@ -44,7 +45,7 @@ pub struct WorkerRef { pub struct WorkerConfig { namespace: String, task_queue: String, - build_id: String, 
+ versioning_strategy: WorkerVersioningStrategy, identity_override: Option, max_cached_workflows: usize, tuner: TunerHolder, @@ -58,11 +59,63 @@ pub struct WorkerConfig { max_activities_per_second: Option, max_task_queue_activities_per_second: Option, graceful_shutdown_period_millis: u64, - use_worker_versioning: bool, nondeterminism_as_workflow_fail: bool, nondeterminism_as_workflow_fail_for_types: HashSet, } +/// Recreates [temporal_sdk_core_api::worker::WorkerVersioningStrategy] +#[derive(FromPyObject)] +pub enum WorkerVersioningStrategy { + None(WorkerVersioningNone), + DeploymentBased(WorkerDeploymentOptions), + LegacyBuildIdBased(LegacyBuildIdBased), +} + +#[derive(FromPyObject)] +pub struct WorkerVersioningNone { + pub build_id: String, +} + +/// Recreates [temporal_sdk_core_api::worker::WorkerDeploymentOptions] +#[derive(FromPyObject)] +pub struct WorkerDeploymentOptions { + pub version: WorkerDeploymentVersion, + pub use_worker_versioning: bool, + /// This is a [enums::v1::VersioningBehavior] represented as i32 + pub default_versioning_behavior: i32, +} + +#[derive(FromPyObject)] +pub struct LegacyBuildIdBased { + pub build_id: String, +} + +/// Recreates [temporal_sdk_core_api::worker::WorkerDeploymentVersion] +#[derive(FromPyObject, Clone)] +pub struct WorkerDeploymentVersion { + pub deployment_name: String, + pub build_id: String, +} + +impl IntoPy> for WorkerDeploymentVersion { + fn into_py(self, py: Python) -> Py { + let dict = PyDict::new(py); + dict.set_item("deployment_name", self.deployment_name) + .unwrap(); + dict.set_item("build_id", self.build_id).unwrap(); + dict.into() + } +} + +impl From for WorkerDeploymentVersion { + fn from(version: temporal_sdk_core_api::worker::WorkerDeploymentVersion) -> Self { + WorkerDeploymentVersion { + deployment_name: version.deployment_name, + build_id: version.build_id, + } + } +} + #[derive(FromPyObject)] pub struct TunerHolder { workflow_slot_supplier: SlotSupplier, @@ -102,6 +155,8 @@ pub struct 
SlotReserveCtx { #[pyo3(get)] pub worker_build_id: String, #[pyo3(get)] + pub worker_deployment_version: Option, + #[pyo3(get)] pub is_sticky: bool, } @@ -116,7 +171,12 @@ impl SlotReserveCtx { }, task_queue: ctx.task_queue().to_string(), worker_identity: ctx.worker_identity().to_string(), - worker_build_id: ctx.worker_build_id().to_string(), + worker_build_id: ctx + .worker_deployment_version() + .clone() + .map(|v| v.build_id) + .unwrap_or_default(), + worker_deployment_version: ctx.worker_deployment_version().clone().map(Into::into), is_sticky: ctx.is_sticky(), } } @@ -559,16 +619,21 @@ fn convert_worker_config( task_locals: Arc>, ) -> PyResult { let converted_tuner = convert_tuner_holder(conf.tuner, task_locals)?; + let converted_versioning_strategy = convert_versioning_strategy(conf.versioning_strategy); temporal_sdk_core::WorkerConfigBuilder::default() .namespace(conf.namespace) .task_queue(conf.task_queue) - .worker_build_id(conf.build_id) + .versioning_strategy(converted_versioning_strategy) .client_identity_override(conf.identity_override) .max_cached_workflows(conf.max_cached_workflows) - .max_concurrent_wft_polls(conf.max_concurrent_workflow_task_polls) + .workflow_task_poller_behavior(PollerBehavior::SimpleMaximum( + conf.max_concurrent_workflow_task_polls, + )) .tuner(Arc::new(converted_tuner)) .nonsticky_to_sticky_poll_ratio(conf.nonsticky_to_sticky_poll_ratio) - .max_concurrent_at_polls(conf.max_concurrent_activity_task_polls) + .activity_task_poller_behavior(PollerBehavior::SimpleMaximum( + conf.max_concurrent_activity_task_polls, + )) .no_remote_activities(conf.no_remote_activities) .sticky_queue_schedule_to_start_timeout(Duration::from_millis( conf.sticky_queue_schedule_to_start_timeout_millis, @@ -585,7 +650,6 @@ fn convert_worker_config( // auto-cancel-activity behavior of shutdown will not occur, so we // always set it even if 0. 
.graceful_shutdown_period(Duration::from_millis(conf.graceful_shutdown_period_millis)) - .use_worker_versioning(conf.use_worker_versioning) .workflow_failure_errors(if conf.nondeterminism_as_workflow_fail { HashSet::from([WorkflowErrorType::Nondeterminism]) } else { @@ -702,6 +766,40 @@ fn convert_slot_supplier( }) } +fn convert_versioning_strategy( + strategy: WorkerVersioningStrategy, +) -> temporal_sdk_core_api::worker::WorkerVersioningStrategy { + match strategy { + WorkerVersioningStrategy::None(vn) => { + temporal_sdk_core_api::worker::WorkerVersioningStrategy::None { + build_id: vn.build_id, + } + } + WorkerVersioningStrategy::DeploymentBased(options) => { + temporal_sdk_core_api::worker::WorkerVersioningStrategy::WorkerDeploymentBased( + temporal_sdk_core_api::worker::WorkerDeploymentOptions { + version: temporal_sdk_core_api::worker::WorkerDeploymentVersion { + deployment_name: options.version.deployment_name, + build_id: options.version.build_id, + }, + use_worker_versioning: options.use_worker_versioning, + default_versioning_behavior: Some( + options + .default_versioning_behavior + .try_into() + .unwrap_or_default(), + ), + }, + ) + } + WorkerVersioningStrategy::LegacyBuildIdBased(lb) => { + temporal_sdk_core_api::worker::WorkerVersioningStrategy::LegacyBuildIdBased { + build_id: lb.build_id, + } + } + } +} + /// For feeding histories into core during replay #[pyclass] pub struct HistoryPusher { diff --git a/temporalio/bridge/worker.py b/temporalio/bridge/worker.py index 77c7114e8..3ef78ec76 100644 --- a/temporalio/bridge/worker.py +++ b/temporalio/bridge/worker.py @@ -44,7 +44,7 @@ class WorkerConfig: namespace: str task_queue: str - build_id: str + versioning_strategy: WorkerVersioningStrategy identity_override: Optional[str] max_cached_workflows: int tuner: TunerHolder @@ -58,11 +58,49 @@ class WorkerConfig: max_activities_per_second: Optional[float] max_task_queue_activities_per_second: Optional[float] graceful_shutdown_period_millis: int - 
use_worker_versioning: bool nondeterminism_as_workflow_fail: bool nondeterminism_as_workflow_fail_for_types: Set[str] +@dataclass +class WorkerDeploymentVersion: + """Python representation of the Rust struct for configuring a worker deployment version.""" + + deployment_name: str + build_id: str + + +@dataclass +class WorkerDeploymentOptions: + """Python representation of the Rust struct for configuring a worker deployment options.""" + + version: WorkerDeploymentVersion + use_worker_versioning: bool + default_versioning_behavior: int + """An enums.v1.VersioningBehavior as an int""" + + +@dataclass +class WorkerVersioningStrategyNone: + """Python representation of the Rust struct for configuring a worker versioning strategy None.""" + + build_id: str + + +@dataclass +class WorkerVersioningStrategyLegacyBuildIdBased: + """Python representation of the Rust struct for configuring a worker versioning strategy legacy Build ID-based.""" + + build_id: str + + +WorkerVersioningStrategy: TypeAlias = Union[ + WorkerVersioningStrategyNone, + WorkerDeploymentOptions, + WorkerVersioningStrategyLegacyBuildIdBased, +] + + @dataclass class ResourceBasedTunerConfig: """Python representation of the Rust struct for configuring a resource-based tuner.""" diff --git a/temporalio/common.py b/temporalio/common.py index 380d97f40..5ef3eaf28 100644 --- a/temporalio/common.py +++ b/temporalio/common.py @@ -8,7 +8,7 @@ from abc import ABC, abstractmethod from dataclasses import dataclass from datetime import datetime, timedelta -from enum import IntEnum +from enum import Enum, IntEnum from typing import ( Any, Callable, @@ -1016,6 +1016,54 @@ def __post_init__(self): Priority.default = Priority(priority_key=None) + +class VersioningBehavior(IntEnum): + """Specifies when a workflow might move from a worker of one Build Id to another. + + WARNING: Experimental API. 
+ """ + + UNSPECIFIED = ( + temporalio.api.enums.v1.VersioningBehavior.VERSIONING_BEHAVIOR_UNSPECIFIED + ) + """ An unspecified versioning behavior. By default, workers opting into worker versioning will + be required to specify a behavior. See :py:class:`temporalio.worker.WorkerDeploymentOptions`.""" + PINNED = temporalio.api.enums.v1.VersioningBehavior.VERSIONING_BEHAVIOR_PINNED + """The workflow will be pinned to the current Build ID unless manually moved.""" + AUTO_UPGRADE = ( + temporalio.api.enums.v1.VersioningBehavior.VERSIONING_BEHAVIOR_AUTO_UPGRADE + ) + """The workflow will automatically move to the latest version (default Build ID of the task + queue) when the next task is dispatched.""" + + +@dataclass(frozen=True) +class WorkerDeploymentVersion: + """Represents the version of a specific worker deployment. + + WARNING: Experimental API. + """ + + deployment_name: str + build_id: str + + def to_canonical_string(self) -> str: + """Returns the canonical string representation of the version.""" + return f"{self.deployment_name}.{self.build_id}" + + @staticmethod + def from_canonical_string(canonical: str) -> WorkerDeploymentVersion: + """Parse a version from a canonical string, which must be in the format + `.`. Deployment name must not have a `.` in it. + """ + parts = canonical.split(".", maxsplit=1) + if len(parts) != 2: + raise ValueError( + f"Cannot parse version string: {canonical}, must be in format ." + ) + return WorkerDeploymentVersion(parts[0], parts[1]) + + # Should be set as the "arg" argument for _arg_or_args checks where the argument # is unset. This is different than None which is a legitimate argument. 
_arg_unset = object() diff --git a/temporalio/worker/__init__.py b/temporalio/worker/__init__.py index 5a6451dbf..96e37d3b3 100644 --- a/temporalio/worker/__init__.py +++ b/temporalio/worker/__init__.py @@ -42,7 +42,12 @@ WorkerTuner, WorkflowSlotInfo, ) -from ._worker import Worker, WorkerConfig +from ._worker import ( + Worker, + WorkerConfig, + WorkerDeploymentConfig, + WorkerDeploymentVersion, +) from ._workflow_instance import ( UnsandboxedWorkflowRunner, WorkflowInstance, @@ -54,6 +59,8 @@ # Primary types "Worker", "WorkerConfig", + "WorkerDeploymentConfig", + "WorkerDeploymentVersion", "Replayer", "ReplayerConfig", "WorkflowReplayResult", diff --git a/temporalio/worker/_replayer.py b/temporalio/worker/_replayer.py index b90de9916..238d64ace 100644 --- a/temporalio/worker/_replayer.py +++ b/temporalio/worker/_replayer.py @@ -215,6 +215,7 @@ def on_eviction_hook( on_eviction_hook=on_eviction_hook, disable_eager_activity_execution=False, disable_safe_eviction=self._config["disable_safe_workflow_eviction"], + should_enforce_versioning_behavior=False, ) # Create bridge worker bridge_worker, pusher = temporalio.bridge.worker.Worker.for_replay( @@ -222,7 +223,6 @@ def on_eviction_hook( temporalio.bridge.worker.WorkerConfig( namespace=self._config["namespace"], task_queue=task_queue, - build_id=self._config["build_id"] or load_default_build_id(), identity_override=self._config["identity"], # Need to tell core whether we want to consider all # non-determinism exceptions as workflow fail, and whether we do @@ -252,7 +252,9 @@ def on_eviction_hook( max_activities_per_second=None, max_task_queue_activities_per_second=None, graceful_shutdown_period_millis=0, - use_worker_versioning=False, + versioning_strategy=temporalio.bridge.worker.WorkerVersioningStrategyNone( + build_id=self._config["build_id"] or load_default_build_id(), + ), ), ) # Start worker @@ -338,7 +340,7 @@ class WorkflowReplayResult: replay_failure: Optional[Exception] """Failure during replay if any. 
- + This does not mean your workflow exited by raising an error, but rather that some task failure such as :py:class:`temporalio.workflow.NondeterminismError` was encountered during diff --git a/temporalio/worker/_tuning.py b/temporalio/worker/_tuning.py index 3f28c0698..b74f79d5e 100644 --- a/temporalio/worker/_tuning.py +++ b/temporalio/worker/_tuning.py @@ -8,6 +8,7 @@ from typing_extensions import TypeAlias import temporalio.bridge.worker +from temporalio.common import WorkerDeploymentVersion _DEFAULT_RESOURCE_ACTIVITY_MAX = 500 @@ -55,7 +56,7 @@ class ResourceBasedSlotConfig: ramp_throttle: Optional[timedelta] = None """Minimum time we will wait (after passing the minimum slots number) between handing out new slots in milliseconds. Defaults to 0 for workflows and 50ms for activities. - + This value matters because how many resources a task will use cannot be determined ahead of time, and thus the system should wait to see how much resources are used before issuing more slots.""" @@ -101,7 +102,13 @@ class SlotReserveContext(Protocol): worker_identity: str """The identity of the worker that is requesting the reservation.""" worker_build_id: str - """The build id of the worker that is requesting the reservation.""" + """The build id of the worker that is requesting the reservation. + + .. warning:: + Deprecated, use :py:attr:`worker_deployment_version` instead. 
+ """ + worker_deployment_version: Optional[WorkerDeploymentVersion] + """The deployment version of the worker that is requesting the reservation, if any.""" is_sticky: bool """True iff this is a reservation for a sticky poll for a workflow task.""" diff --git a/temporalio/worker/_worker.py b/temporalio/worker/_worker.py index 3aeb6d4e2..f0b446111 100644 --- a/temporalio/worker/_worker.py +++ b/temporalio/worker/_worker.py @@ -8,8 +8,18 @@ import logging import sys import warnings +from dataclasses import dataclass from datetime import timedelta -from typing import Any, Awaitable, Callable, List, Optional, Sequence, Type, cast +from typing import ( + Any, + Awaitable, + Callable, + List, + Optional, + Sequence, + Type, + cast, +) from typing_extensions import TypedDict @@ -26,10 +36,11 @@ import temporalio.exceptions import temporalio.runtime import temporalio.service +from temporalio.common import VersioningBehavior, WorkerDeploymentVersion from ._activity import SharedStateManager, _ActivityWorker from ._interceptor import Interceptor -from ._tuning import WorkerTuner, _to_bridge_slot_supplier +from ._tuning import WorkerTuner from ._workflow import _WorkflowWorker from ._workflow_instance import UnsandboxedWorkflowRunner, WorkflowRunner from .workflow_sandbox import SandboxedWorkflowRunner @@ -82,6 +93,7 @@ def __init__( on_fatal_error: Optional[Callable[[BaseException], Awaitable[None]]] = None, use_worker_versioning: bool = False, disable_safe_workflow_eviction: bool = False, + deployment_config: Optional[WorkerDeploymentConfig] = None, ) -> None: """Create a worker to process workflows and/or activities. @@ -124,6 +136,8 @@ def __init__( build_id: Unique identifier for the current runtime. This is best set as a hash of all code and should change only when code does. If unset, a best-effort identifier is generated. + Exclusive with `deployment_config`. + WARNING: Deprecated. Use `deployment_config` instead. identity: Identity for this worker client. 
If unset, the client identity is used. max_cached_workflows: If nonzero, workflows will be cached and @@ -205,6 +219,8 @@ def __init__( workflows which it claims to be compatible with. For more information, see https://docs.temporal.io/workers#worker-versioning. + Exclusive with `deployment_config`. + WARNING: Deprecated. Use `deployment_config` instead. disable_safe_workflow_eviction: If true, instead of letting the workflow collect its tasks properly, the worker will simply let the Python garbage collector collect the tasks. WARNING: Users @@ -212,6 +228,9 @@ def __init__( throw ``GeneratorExit`` in coroutines causing them to wake up in different threads and run ``finally`` and other code in the wrong workflow environment. + deployment_config: Deployment config for the worker. Exclusive with `build_id` and + `use_worker_versioning`. + WARNING: This is an experimental feature and may change in the future. """ if not activities and not workflows: raise ValueError("At least one activity or workflow must be specified") @@ -219,6 +238,10 @@ def __init__( raise ValueError( "build_id must be specified when use_worker_versioning is True" ) + if deployment_config and (build_id or use_worker_versioning): + raise ValueError( + "deployment_config cannot be used with build_id or use_worker_versioning" + ) # Prepend applicable client interceptors to the given ones client_config = client.config() @@ -307,6 +330,12 @@ def __init__( ) self._workflow_worker: Optional[_WorkflowWorker] = None if workflows: + should_enforce_versioning_behavior = ( + deployment_config is not None + and deployment_config.use_worker_versioning + and deployment_config.default_versioning_behavior + == temporalio.common.VersioningBehavior.UNSPECIFIED + ) self._workflow_worker = _WorkflowWorker( bridge_worker=lambda: self._bridge_worker, namespace=client.namespace, @@ -324,6 +353,7 @@ def __init__( metric_meter=self._runtime.metric_meter, on_eviction_hook=None, 
disable_safe_eviction=disable_safe_workflow_eviction, + should_enforce_versioning_behavior=should_enforce_versioning_behavior, ) if tuner is not None: @@ -345,6 +375,24 @@ def __init__( bridge_tuner = tuner._to_bridge_tuner() + versioning_strategy: temporalio.bridge.worker.WorkerVersioningStrategy + if deployment_config: + versioning_strategy = ( + deployment_config._to_bridge_worker_deployment_options() + ) + elif use_worker_versioning: + build_id = build_id or load_default_build_id() + versioning_strategy = ( + temporalio.bridge.worker.WorkerVersioningStrategyLegacyBuildIdBased( + build_id=build_id + ) + ) + else: + build_id = build_id or load_default_build_id() + versioning_strategy = temporalio.bridge.worker.WorkerVersioningStrategyNone( + build_id=build_id + ) + # Create bridge worker last. We have empirically observed that if it is # created before an error is raised from the activity worker # constructor, a deadlock/hang will occur presumably while trying to @@ -357,7 +405,6 @@ def __init__( temporalio.bridge.worker.WorkerConfig( namespace=client.namespace, task_queue=task_queue, - build_id=build_id or load_default_build_id(), identity_override=identity, max_cached_workflows=max_cached_workflows, tuner=bridge_tuner, @@ -382,7 +429,6 @@ def __init__( graceful_shutdown_period_millis=int( 1000 * graceful_shutdown_timeout.total_seconds() ), - use_worker_versioning=use_worker_versioning, # Need to tell core whether we want to consider all # non-determinism exceptions as workflow fail, and whether we do # per workflow type @@ -393,6 +439,7 @@ def __init__( if self._workflow_worker else set() ), + versioning_strategy=versioning_strategy, ), ) @@ -666,6 +713,31 @@ class WorkerConfig(TypedDict, total=False): on_fatal_error: Optional[Callable[[BaseException], Awaitable[None]]] use_worker_versioning: bool disable_safe_workflow_eviction: bool + deployment_config: Optional[WorkerDeploymentConfig] + + +@dataclass +class WorkerDeploymentConfig: + """Options for configuring 
the Worker Versioning feature. + + WARNING: This is an experimental feature and may change in the future. + """ + + version: WorkerDeploymentVersion + use_worker_versioning: bool + default_versioning_behavior: VersioningBehavior = VersioningBehavior.UNSPECIFIED + + def _to_bridge_worker_deployment_options( + self, + ) -> temporalio.bridge.worker.WorkerDeploymentOptions: + return temporalio.bridge.worker.WorkerDeploymentOptions( + version=temporalio.bridge.worker.WorkerDeploymentVersion( + deployment_name=self.version.deployment_name, + build_id=self.version.build_id, + ), + use_worker_versioning=self.use_worker_versioning, + default_versioning_behavior=self.default_versioning_behavior.value, + ) _default_build_id: Optional[str] = None diff --git a/temporalio/worker/_workflow.py b/temporalio/worker/_workflow.py index 37e6810c9..914ce1946 100644 --- a/temporalio/worker/_workflow.py +++ b/temporalio/worker/_workflow.py @@ -8,7 +8,6 @@ import os import sys import threading -from dataclasses import dataclass from datetime import timezone from types import TracebackType from typing import ( @@ -78,6 +77,7 @@ def __init__( ] ], disable_safe_eviction: bool, + should_enforce_versioning_behavior: bool, ) -> None: self._bridge_worker = bridge_worker self._namespace = namespace @@ -135,6 +135,16 @@ def __init__( # Confirm name unique if defn.name in self._workflows: raise ValueError(f"More than one workflow named {defn.name}") + if should_enforce_versioning_behavior: + if defn.versioning_behavior in [ + None, + temporalio.common.VersioningBehavior.UNSPECIFIED, + ]: + raise ValueError( + f"Workflow {defn.name} must specify a versioning behavior using " + "the `versioning_behavior` argument to `@workflow.defn`." 
+ ) + # Prepare the workflow with the runner (this will error in the # sandbox if an import fails somehow) try: diff --git a/temporalio/worker/_workflow_instance.py b/temporalio/worker/_workflow_instance.py index c82a9bba9..877737355 100644 --- a/temporalio/worker/_workflow_instance.py +++ b/temporalio/worker/_workflow_instance.py @@ -50,6 +50,7 @@ import temporalio.api.sdk.v1 import temporalio.bridge.proto.activity_result import temporalio.bridge.proto.child_workflow +import temporalio.bridge.proto.common import temporalio.bridge.proto.workflow_activation import temporalio.bridge.proto.workflow_commands import temporalio.bridge.proto.workflow_completion @@ -211,7 +212,9 @@ def __init__(self, det: WorkflowInstanceDetails) -> None: self._primary_task: Optional[asyncio.Task[None]] = None self._time_ns = 0 self._cancel_requested = False - self._current_build_id = "" + self._deployment_version_for_current_task: Optional[ + temporalio.bridge.proto.common.WorkerDeploymentVersion + ] = None self._current_history_length = 0 self._current_history_size = 0 self._continue_as_new_suggested = False @@ -345,8 +348,15 @@ def activate( temporalio.bridge.proto.workflow_completion.WorkflowActivationCompletion() ) self._current_completion.successful.SetInParent() + self._current_completion.successful.versioning_behavior = ( + self._defn.versioning_behavior.value + if self._defn.versioning_behavior + else temporalio.api.enums.v1.VersioningBehavior.VERSIONING_BEHAVIOR_UNSPECIFIED + ) self._current_activation_error: Optional[Exception] = None - self._current_build_id = act.build_id_for_current_task + self._deployment_version_for_current_task = ( + act.deployment_version_for_current_task + ) self._current_history_length = act.history_length self._current_history_size = act.history_size_bytes self._continue_as_new_suggested = act.continue_as_new_suggested @@ -429,6 +439,7 @@ def activate( ) # Set completion failure self._current_completion.failed.failure.SetInParent() + # TODO: Review - 
odd that we don't un-set success here? try: self._failure_converter.to_failure( activation_err, @@ -979,7 +990,19 @@ def workflow_extern_functions(self) -> Mapping[str, Callable]: return self._extern_functions def workflow_get_current_build_id(self) -> str: - return self._current_build_id + if not self._deployment_version_for_current_task: + return "" + return self._deployment_version_for_current_task.build_id + + def workflow_get_current_deployment_version( + self, + ) -> Optional[temporalio.common.WorkerDeploymentVersion]: + if not self._deployment_version_for_current_task: + return None + return temporalio.common.WorkerDeploymentVersion( + build_id=self._deployment_version_for_current_task.build_id, + deployment_name=self._deployment_version_for_current_task.deployment_name, + ) def workflow_get_current_history_length(self) -> int: return self._current_history_length diff --git a/temporalio/workflow.py b/temporalio/workflow.py index 96c105493..3d6b55006 100644 --- a/temporalio/workflow.py +++ b/temporalio/workflow.py @@ -90,12 +90,16 @@ def defn( name: Optional[str] = None, sandboxed: bool = True, failure_exception_types: Sequence[Type[BaseException]] = [], + versioning_behavior: temporalio.common.VersioningBehavior = temporalio.common.VersioningBehavior.UNSPECIFIED, ) -> Callable[[ClassType], ClassType]: ... @overload def defn( - *, sandboxed: bool = True, dynamic: bool = False + *, + sandboxed: bool = True, + dynamic: bool = False, + versioning_behavior: temporalio.common.VersioningBehavior = temporalio.common.VersioningBehavior.UNSPECIFIED, ) -> Callable[[ClassType], ClassType]: ... @@ -106,6 +110,7 @@ def defn( sandboxed: bool = True, dynamic: bool = False, failure_exception_types: Sequence[Type[BaseException]] = [], + versioning_behavior: temporalio.common.VersioningBehavior = temporalio.common.VersioningBehavior.UNSPECIFIED, ): """Decorator for workflow classes. @@ -127,6 +132,8 @@ def defn( applied in addition to ones set on the worker constructor. 
If ``Exception`` is set, it effectively will fail a workflow/update in all user exception cases. WARNING: This setting is experimental. + versioning_behavior: Specifies the versioning behavior to use for this workflow. + WARNING: This setting is experimental. """ def decorator(cls: ClassType) -> ClassType: @@ -136,6 +143,7 @@ def decorator(cls: ClassType) -> ClassType: workflow_name=name or cls.__name__ if not dynamic else None, sandboxed=sandboxed, failure_exception_types=failure_exception_types, + versioning_behavior=versioning_behavior, ) return cls @@ -472,12 +480,28 @@ def _logger_details(self) -> Mapping[str, Any]: def get_current_build_id(self) -> str: """Get the Build ID of the worker which executed the current Workflow Task. - May be undefined if the task was completed by a worker without a Build ID. If this worker is the one executing - this task for the first time and has a Build ID set, then its ID will be used. This value may change over the - lifetime of the workflow run, but is deterministic and safe to use for branching. + May be undefined if the task was completed by a worker without a Build ID. If this worker is + the one executing this task for the first time and has a Build ID set, then its ID will be + used. This value may change over the lifetime of the workflow run, but is deterministic and + safe to use for branching. + + .. deprecated:: + Use get_current_deployment_version instead. """ return _Runtime.current().workflow_get_current_build_id() + def get_current_deployment_version( + self, + ) -> Optional[temporalio.common.WorkerDeploymentVersion]: + """Get the deployment version of the worker which executed the current Workflow Task. + + May be None if the task was completed by a worker without a deployment version or build + id. If this worker is the one executing this task for the first time and has a deployment + version set, then its ID will be used. 
This value may change over the lifetime of the + workflow run, but is deterministic and safe to use for branching. + """ + return _Runtime.current().workflow_get_current_deployment_version() + def get_current_history_length(self) -> int: """Get the current number of events in history. @@ -605,6 +629,11 @@ def workflow_extern_functions(self) -> Mapping[str, Callable]: ... @abstractmethod def workflow_get_current_build_id(self) -> str: ... + @abstractmethod + def workflow_get_current_deployment_version( + self, + ) -> Optional[temporalio.common.WorkerDeploymentVersion]: ... + @abstractmethod def workflow_get_current_history_length(self) -> int: ... @@ -1419,6 +1448,7 @@ class _Definition: # Types loaded on post init if both are None arg_types: Optional[List[Type]] = None ret_type: Optional[Type] = None + versioning_behavior: Optional[temporalio.common.VersioningBehavior] = None @staticmethod def from_class(cls: Type) -> Optional[_Definition]: @@ -1473,6 +1503,7 @@ def _apply_to_class( workflow_name: Optional[str], sandboxed: bool, failure_exception_types: Sequence[Type[BaseException]], + versioning_behavior: temporalio.common.VersioningBehavior, ) -> None: # Check it's not being doubly applied if _Definition.from_class(cls): @@ -1601,6 +1632,7 @@ def _apply_to_class( ) assert run_fn + assert seen_run_attr defn = _Definition( name=workflow_name, cls=cls, @@ -1610,6 +1642,7 @@ def _apply_to_class( updates=updates, sandboxed=sandboxed, failure_exception_types=failure_exception_types, + versioning_behavior=versioning_behavior, ) setattr(cls, "__temporal_workflow_definition", defn) setattr(run_fn, "__temporal_workflow_definition", defn) diff --git a/tests/conftest.py b/tests/conftest.py index c91c366bc..e2c542c15 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -107,6 +107,12 @@ async def env(env_type: str) -> AsyncGenerator[WorkflowEnvironment, None]: "system.enableEagerWorkflowStart=true", "--dynamic-config-value", "frontend.enableExecuteMultiOperation=true", + 
"--dynamic-config-value", + "frontend.enableVersioningWorkflowAPIs=true", + "--dynamic-config-value", + "frontend.enableVersioningDataAPIs=true", + "--dynamic-config-value", + "system.enableDeploymentVersions=true", ], # TODO: Remove after next CLI release dev_server_download_version="v1.3.1-priority.0", diff --git a/tests/helpers/__init__.py b/tests/helpers/__init__.py index bda35ea6b..48eb03f53 100644 --- a/tests/helpers/__init__.py +++ b/tests/helpers/__init__.py @@ -46,26 +46,26 @@ def new_worker( ) +T = TypeVar("T") + + async def assert_eventually( - fn: Callable[[], Awaitable], + fn: Callable[[], Awaitable[T]], *, timeout: timedelta = timedelta(seconds=10), interval: timedelta = timedelta(milliseconds=200), -) -> None: +) -> T: start_sec = time.monotonic() while True: try: - await fn() - return + res = await fn() + return res except AssertionError: if timedelta(seconds=time.monotonic() - start_sec) >= timeout: raise await asyncio.sleep(interval.total_seconds()) -T = TypeVar("T") - - async def assert_eq_eventually( expected: T, fn: Callable[[], Awaitable[T]], diff --git a/tests/test_workflow.py b/tests/test_workflow.py index 30cc4db76..8e4b0bca8 100644 --- a/tests/test_workflow.py +++ b/tests/test_workflow.py @@ -5,7 +5,7 @@ import pytest from temporalio import workflow -from temporalio.common import RawValue +from temporalio.common import RawValue, VersioningBehavior class GoodDefnBase: @@ -130,6 +130,29 @@ def test_workflow_defn_good(): }, sandboxed=True, failure_exception_types=[], + versioning_behavior=VersioningBehavior.UNSPECIFIED, + ) + + +@workflow.defn(versioning_behavior=VersioningBehavior.PINNED) +class VersioningBehaviorDefn: + @workflow.run + async def run(self, name: str) -> str: + raise NotImplementedError + + +def test_workflow_definition_with_versioning_behavior(): + defn = workflow._Definition.from_class(VersioningBehaviorDefn) + assert defn == workflow._Definition( + name="VersioningBehaviorDefn", + cls=VersioningBehaviorDefn, + 
run_fn=VersioningBehaviorDefn.run, + signals={}, + queries={}, + updates={}, + sandboxed=True, + failure_exception_types=[], + versioning_behavior=VersioningBehavior.PINNED, ) diff --git a/tests/testing/test_workflow.py b/tests/testing/test_workflow.py index 7538cb29d..126269f12 100644 --- a/tests/testing/test_workflow.py +++ b/tests/testing/test_workflow.py @@ -1,5 +1,6 @@ import asyncio import platform +import sys import uuid from datetime import datetime, timedelta, timezone from time import monotonic @@ -223,7 +224,9 @@ def assert_proper_error(err: Optional[BaseException]) -> None: # In unsandboxed workflows, this message has extra diff info appended # due to pytest's custom loader that does special assert tricks. But in # sandboxed workflows, this just has the first line. - assert err.message.startswith("assert 'foo' == 'bar'") + # The plain asserter is used for 3.9 & below due to import issues + if sys.version_info[:2] > (3, 9): + assert err.message.startswith("assert 'foo' == 'bar'") async with WorkflowEnvironment.from_client(client) as env: async with new_worker(env.client, AssertFailWorkflow) as worker: diff --git a/tests/worker/test_worker.py b/tests/worker/test_worker.py index 17d75db27..67ce97ae9 100644 --- a/tests/worker/test_worker.py +++ b/tests/worker/test_worker.py @@ -2,15 +2,27 @@ import asyncio import concurrent.futures +import sys import uuid from datetime import timedelta -from typing import Any, Awaitable, Callable, Optional +from typing import Any, Awaitable, Callable, Optional, Sequence import pytest +import temporalio.api.enums.v1 import temporalio.worker._worker from temporalio import activity, workflow +from temporalio.api.workflowservice.v1 import ( + DescribeWorkerDeploymentRequest, + DescribeWorkerDeploymentResponse, + SetWorkerDeploymentCurrentVersionRequest, + SetWorkerDeploymentCurrentVersionResponse, + SetWorkerDeploymentRampingVersionRequest, + SetWorkerDeploymentRampingVersionResponse, +) from temporalio.client import 
BuildIdOpAddNewDefault, Client, TaskReachabilityType +from temporalio.common import RawValue, VersioningBehavior +from temporalio.service import RPCError from temporalio.testing import WorkflowEnvironment from temporalio.worker import ( ActivitySlotInfo, @@ -25,11 +37,13 @@ SlotReleaseContext, SlotReserveContext, Worker, + WorkerDeploymentConfig, + WorkerDeploymentVersion, WorkerTuner, WorkflowSlotInfo, ) from temporalio.workflow import VersioningIntent -from tests.helpers import new_worker, worker_versioning_enabled +from tests.helpers import assert_eventually, new_worker, worker_versioning_enabled def test_load_default_worker_binary_id(): @@ -518,6 +532,444 @@ def release_slot(self, ctx: SlotReleaseContext) -> None: await asyncio.sleep(1) +@workflow.defn( + name="DeploymentVersioningWorkflow", + versioning_behavior=VersioningBehavior.AUTO_UPGRADE, +) +class DeploymentVersioningWorkflowV1AutoUpgrade: + def __init__(self) -> None: + self.finish = False + + @workflow.run + async def run(self): + await workflow.wait_condition(lambda: self.finish) + return "version-v1" + + @workflow.signal + def do_finish(self): + self.finish = True + + @workflow.query + def state(self): + return "v1" + + +@workflow.defn( + name="DeploymentVersioningWorkflow", versioning_behavior=VersioningBehavior.PINNED +) +class DeploymentVersioningWorkflowV2Pinned: + def __init__(self) -> None: + self.finish = False + + @workflow.run + async def run(self): + await workflow.wait_condition(lambda: self.finish) + depver = workflow.info().get_current_deployment_version() + assert depver + assert depver.build_id == "2.0" + # Just ensuring the rust object was converted properly and this method still works + workflow.logger.debug(f"Dep string: {depver.to_canonical_string()}") + return "version-v2" + + @workflow.signal + def do_finish(self): + self.finish = True + + @workflow.query + def state(self): + return "v2" + + +@workflow.defn( + name="DeploymentVersioningWorkflow", + 
versioning_behavior=VersioningBehavior.AUTO_UPGRADE, +) +class DeploymentVersioningWorkflowV3AutoUpgrade: + def __init__(self) -> None: + self.finish = False + + @workflow.run + async def run(self): + await workflow.wait_condition(lambda: self.finish) + return "version-v3" + + @workflow.signal + def do_finish(self): + self.finish = True + + @workflow.query + def state(self): + return "v3" + + +async def test_worker_with_worker_deployment_config( + client: Client, env: WorkflowEnvironment +): + if env.supports_time_skipping: + pytest.skip("Test Server doesn't support worker deployments") + + deployment_name = f"deployment-{uuid.uuid4()}" + worker_v1 = WorkerDeploymentVersion(deployment_name=deployment_name, build_id="1.0") + worker_v2 = WorkerDeploymentVersion(deployment_name=deployment_name, build_id="2.0") + worker_v3 = WorkerDeploymentVersion(deployment_name=deployment_name, build_id="3.0") + async with ( + new_worker( + client, + DeploymentVersioningWorkflowV1AutoUpgrade, + deployment_config=WorkerDeploymentConfig( + version=worker_v1, + use_worker_versioning=True, + ), + ) as w1, + new_worker( + client, + DeploymentVersioningWorkflowV2Pinned, + deployment_config=WorkerDeploymentConfig( + version=worker_v2, + use_worker_versioning=True, + ), + task_queue=w1.task_queue, + ), + new_worker( + client, + DeploymentVersioningWorkflowV3AutoUpgrade, + deployment_config=WorkerDeploymentConfig( + version=worker_v3, + use_worker_versioning=True, + ), + task_queue=w1.task_queue, + ), + ): + describe_resp = await wait_until_worker_deployment_visible( + client, + worker_v1, + ) + await set_current_deployment_version( + client, describe_resp.conflict_token, worker_v1 + ) + + # Start workflow 1 which will use the 1.0 worker on auto-upgrade + wf1 = await client.start_workflow( + DeploymentVersioningWorkflowV1AutoUpgrade.run, + id="basic-versioning-v1", + task_queue=w1.task_queue, + ) + assert "v1" == await wf1.query("state") + + describe_resp2 = await 
wait_until_worker_deployment_visible(client, worker_v2) + await set_current_deployment_version( + client, describe_resp2.conflict_token, worker_v2 + ) + + wf2 = await client.start_workflow( + DeploymentVersioningWorkflowV2Pinned.run, + id="basic-versioning-v2", + task_queue=w1.task_queue, + ) + assert "v2" == await wf2.query("state") + + describe_resp3 = await wait_until_worker_deployment_visible(client, worker_v3) + await set_current_deployment_version( + client, describe_resp3.conflict_token, worker_v3 + ) + + wf3 = await client.start_workflow( + DeploymentVersioningWorkflowV3AutoUpgrade.run, + id="basic-versioning-v3", + task_queue=w1.task_queue, + ) + assert "v3" == await wf3.query("state") + + # Signal all workflows to finish + await wf1.signal(DeploymentVersioningWorkflowV1AutoUpgrade.do_finish) + await wf2.signal(DeploymentVersioningWorkflowV2Pinned.do_finish) + await wf3.signal(DeploymentVersioningWorkflowV3AutoUpgrade.do_finish) + + res1 = await wf1.result() + res2 = await wf2.result() + res3 = await wf3.result() + + assert res1 == "version-v3" + assert res2 == "version-v2" + assert res3 == "version-v3" + + +async def test_worker_deployment_ramp(client: Client, env: WorkflowEnvironment): + if env.supports_time_skipping: + pytest.skip("Test Server doesn't support worker deployments") + + deployment_name = f"deployment-ramping-{uuid.uuid4()}" + v1 = WorkerDeploymentVersion(deployment_name=deployment_name, build_id="1.0") + v2 = WorkerDeploymentVersion(deployment_name=deployment_name, build_id="2.0") + async with ( + new_worker( + client, + DeploymentVersioningWorkflowV1AutoUpgrade, + deployment_config=WorkerDeploymentConfig( + version=v1, use_worker_versioning=True + ), + ) as w1, + new_worker( + client, + DeploymentVersioningWorkflowV2Pinned, + deployment_config=WorkerDeploymentConfig( + version=v2, use_worker_versioning=True + ), + task_queue=w1.task_queue, + ), + ): + await wait_until_worker_deployment_visible(client, v1) + describe_resp = await 
wait_until_worker_deployment_visible(client, v2) + + # Set current version to v1 and ramp v2 to 100% + conflict_token = ( + await set_current_deployment_version( + client, describe_resp.conflict_token, v1 + ) + ).conflict_token + conflict_token = ( + await set_ramping_version(client, conflict_token, v2, 100) + ).conflict_token + + # Run workflows and verify they run on v2 + for i in range(3): + wf = await client.start_workflow( + DeploymentVersioningWorkflowV2Pinned.run, + id=f"versioning-ramp-100-{i}-{uuid.uuid4()}", + task_queue=w1.task_queue, + ) + await wf.signal(DeploymentVersioningWorkflowV2Pinned.do_finish) + res = await wf.result() + assert res == "version-v2" + + # Set ramp to 0, expecting workflows to run on v1 + conflict_token = ( + await set_ramping_version(client, conflict_token, v2, 0) + ).conflict_token + for i in range(3): + wfa = await client.start_workflow( + DeploymentVersioningWorkflowV1AutoUpgrade.run, + id=f"versioning-ramp-0-{i}-{uuid.uuid4()}", + task_queue=w1.task_queue, + ) + await wfa.signal(DeploymentVersioningWorkflowV1AutoUpgrade.do_finish) + res = await wfa.result() + assert res == "version-v1" + + # Set ramp to 50 and eventually verify workflows run on both versions + await set_ramping_version(client, conflict_token, v2, 50) + seen_results = set() + + async def run_and_record(): + wf = await client.start_workflow( + DeploymentVersioningWorkflowV1AutoUpgrade.run, + id=f"versioning-ramp-50-{uuid.uuid4()}", + task_queue=w1.task_queue, + ) + await wf.signal(DeploymentVersioningWorkflowV1AutoUpgrade.do_finish) + return await wf.result() + + async def check_results(): + res = await run_and_record() + seen_results.add(res) + assert "version-v1" in seen_results and "version-v2" in seen_results + + await assert_eventually(check_results) + + +@workflow.defn(dynamic=True, versioning_behavior=VersioningBehavior.PINNED) +class DynamicWorkflowVersioningOnDefn: + @workflow.run + async def run(self, args: Sequence[RawValue]) -> str: + return 
"dynamic" + + +async def test_worker_deployment_dynamic_workflow_on_run( + client: Client, env: WorkflowEnvironment +): + if env.supports_time_skipping: + pytest.skip("Test Server doesn't support worker deployments") + + deployment_name = f"deployment-dynamic-{uuid.uuid4()}" + worker_v1 = WorkerDeploymentVersion(deployment_name=deployment_name, build_id="1.0") + + async with new_worker( + client, + DynamicWorkflowVersioningOnDefn, + deployment_config=WorkerDeploymentConfig( + version=worker_v1, + use_worker_versioning=True, + ), + ) as w: + describe_resp = await wait_until_worker_deployment_visible( + client, + worker_v1, + ) + await set_current_deployment_version( + client, describe_resp.conflict_token, worker_v1 + ) + + wf = await client.start_workflow( + "cooldynamicworkflow", + id=f"dynamic-workflow-versioning-{uuid.uuid4()}", + task_queue=w.task_queue, + ) + result = await wf.result() + assert result == "dynamic" + + history = await wf.fetch_history() + assert any( + event.HasField("workflow_task_completed_event_attributes") + and event.workflow_task_completed_event_attributes.versioning_behavior + == temporalio.api.enums.v1.VersioningBehavior.VERSIONING_BEHAVIOR_PINNED + for event in history.events + ) + + +@workflow.defn +class NoVersioningAnnotationWorkflow: + @workflow.run + async def run(self) -> str: + return "whee" + + +@workflow.defn(dynamic=True) +class NoVersioningAnnotationDynamicWorkflow: + @workflow.run + async def run(self, args: Sequence[RawValue]) -> str: + return "whee" + + +async def test_workflows_must_have_versioning_behavior_when_feature_turned_on( + client: Client, env: WorkflowEnvironment +): + with pytest.raises(ValueError) as exc_info: + Worker( + client, + task_queue=f"task-queue-{uuid.uuid4()}", + workflows=[NoVersioningAnnotationWorkflow], + deployment_config=WorkerDeploymentConfig( + version=WorkerDeploymentVersion( + deployment_name="whatever", build_id="1.0" + ), + use_worker_versioning=True, + ), + ) + + assert "must specify a 
versioning behavior" in str(exc_info.value) + + with pytest.raises(ValueError) as exc_info: + Worker( + client, + task_queue=f"task-queue-{uuid.uuid4()}", + workflows=[NoVersioningAnnotationDynamicWorkflow], + deployment_config=WorkerDeploymentConfig( + version=WorkerDeploymentVersion( + deployment_name="whatever", build_id="1.0" + ), + use_worker_versioning=True, + ), + ) + + assert "must specify a versioning behavior" in str(exc_info.value) + + +async def test_workflows_can_use_default_versioning_behavior( + client: Client, env: WorkflowEnvironment +): + if env.supports_time_skipping: + pytest.skip("Test Server doesn't support worker versioning") + + deployment_name = f"deployment-default-versioning-{uuid.uuid4()}" + worker_v1 = WorkerDeploymentVersion(deployment_name=deployment_name, build_id="1.0") + + async with new_worker( + client, + NoVersioningAnnotationWorkflow, + deployment_config=WorkerDeploymentConfig( + version=worker_v1, + use_worker_versioning=True, + default_versioning_behavior=VersioningBehavior.PINNED, + ), + ) as w: + describe_resp = await wait_until_worker_deployment_visible( + client, + worker_v1, + ) + await set_current_deployment_version( + client, describe_resp.conflict_token, worker_v1 + ) + + wf = await client.start_workflow( + NoVersioningAnnotationWorkflow.run, + id=f"default-versioning-behavior-{uuid.uuid4()}", + task_queue=w.task_queue, + ) + await wf.result() + + history = await wf.fetch_history() + assert any( + event.HasField("workflow_task_completed_event_attributes") + and event.workflow_task_completed_event_attributes.versioning_behavior + == temporalio.api.enums.v1.VersioningBehavior.VERSIONING_BEHAVIOR_PINNED + for event in history.events + ) + + +async def wait_until_worker_deployment_visible( + client: Client, version: WorkerDeploymentVersion +) -> DescribeWorkerDeploymentResponse: + async def mk_call() -> DescribeWorkerDeploymentResponse: + try: + res = await client.workflow_service.describe_worker_deployment( + 
DescribeWorkerDeploymentRequest( + namespace=client.namespace, + deployment_name=version.deployment_name, + ) + ) + except RPCError: + # Expected + assert False + assert any( + vs.version == version.to_canonical_string() + for vs in res.worker_deployment_info.version_summaries + ) + return res + + return await assert_eventually(mk_call) + + +async def set_current_deployment_version( + client: Client, conflict_token: bytes, version: WorkerDeploymentVersion +) -> SetWorkerDeploymentCurrentVersionResponse: + return await client.workflow_service.set_worker_deployment_current_version( + SetWorkerDeploymentCurrentVersionRequest( + namespace=client.namespace, + deployment_name=version.deployment_name, + version=version.to_canonical_string(), + conflict_token=conflict_token, + ) + ) + + +async def set_ramping_version( + client: Client, + conflict_token: bytes, + version: WorkerDeploymentVersion, + percentage: float, +) -> SetWorkerDeploymentRampingVersionResponse: + response = await client.workflow_service.set_worker_deployment_ramping_version( + SetWorkerDeploymentRampingVersionRequest( + namespace=client.namespace, + deployment_name=version.deployment_name, + version=version.to_canonical_string(), + conflict_token=conflict_token, + percentage=percentage, + ) + ) + return response + + def create_worker( client: Client, on_fatal_error: Optional[Callable[[BaseException], Awaitable[None]]] = None,