Skip to content

Commit 5882d52

Browse files
authored
Update protos 1.16.0 rc.3 (#824)
* updated protos

  Signed-off-by: Albert Callarisa <[email protected]>

* Fix usage of `overwrite`, moving it from the job to the schedule request.

  Signed-off-by: Albert Callarisa <[email protected]>

---------

Signed-off-by: Albert Callarisa <[email protected]>
1 parent a7bde02 commit 5882d52

File tree

10 files changed

+402
-390
lines changed

10 files changed

+402
-390
lines changed

dapr/aio/clients/grpc/client.py

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1851,13 +1851,14 @@ async def get_metadata(self) -> GetMetadataResponse:
18511851
headers=await call.initial_metadata(),
18521852
)
18531853

1854-
async def schedule_job_alpha1(self, job: Job) -> DaprResponse:
1854+
async def schedule_job_alpha1(self, job: Job, overwrite: bool = False) -> DaprResponse:
18551855
"""Schedules a job to be triggered at a specified time or interval.
18561856
18571857
This is an Alpha API and is subject to change.
18581858
18591859
Args:
18601860
job (Job): The job to schedule. Must have a name and either schedule or due_time.
1861+
overwrite (bool): If true, allows this job to overwrite an existing job with the same name.
18611862
18621863
Returns:
18631864
DaprResponse: Empty response indicating successful scheduling.
@@ -1879,7 +1880,7 @@ async def schedule_job_alpha1(self, job: Job) -> DaprResponse:
18791880

18801881
# Convert job to proto using the Job class private method
18811882
job_proto = job._get_proto()
1882-
request = api_v1.ScheduleJobRequest(job=job_proto)
1883+
request = api_v1.ScheduleJobRequest(job=job_proto, overwrite=overwrite)
18831884

18841885
try:
18851886
call = self._stub.ScheduleJobAlpha1(request)

dapr/clients/grpc/_jobs.py

Lines changed: 1 addition & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -99,7 +99,6 @@ class Job:
9999
when the job is triggered. If not provided, an empty Any proto will be used.
100100
failure_policy (Optional[FailurePolicy]): The failure policy to apply when the job fails
101101
to trigger. If not provided, the default behavior is determined by the Dapr runtime.
102-
overwrite (bool): If true, allows this job to overwrite an existing job with the same name.
103102
"""
104103

105104
name: str
@@ -109,7 +108,6 @@ class Job:
109108
ttl: Optional[str] = None
110109
data: Optional[GrpcAny] = None
111110
failure_policy: Optional[FailurePolicy] = None
112-
overwrite: bool = False
113111

114112
def _get_proto(self):
115113
"""Convert this Job instance to a Dapr Job proto message.
@@ -123,7 +121,7 @@ def _get_proto(self):
123121
from google.protobuf.any_pb2 import Any as GrpcAny
124122

125123
# Build the job proto
126-
job_proto = api_v1.Job(name=self.name, overwrite=self.overwrite)
124+
job_proto = api_v1.Job(name=self.name)
127125

128126
if self.schedule:
129127
job_proto.schedule = self.schedule
@@ -133,7 +131,6 @@ def _get_proto(self):
133131
job_proto.due_time = self.due_time
134132
if self.ttl:
135133
job_proto.ttl = self.ttl
136-
# overwrite is already set in the constructor above
137134

138135
# data field is required, set empty Any if not provided
139136
if self.data:
@@ -184,5 +181,4 @@ def _from_proto(cls, job_proto):
184181
ttl=job_proto.ttl if job_proto.HasField('ttl') else None,
185182
data=job_proto.data if job_proto.HasField('data') and job_proto.data.value else None,
186183
failure_policy=failure_policy,
187-
overwrite=job_proto.overwrite,
188184
)

dapr/clients/grpc/client.py

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1778,13 +1778,14 @@ def converse_alpha1(
17781778
except RpcError as err:
17791779
raise DaprGrpcError(err) from err
17801780

1781-
def schedule_job_alpha1(self, job: Job) -> DaprResponse:
1781+
def schedule_job_alpha1(self, job: Job, overwrite: bool = False) -> DaprResponse:
17821782
"""Schedules a job to be triggered at a specified time or interval.
17831783
17841784
This is an Alpha API and is subject to change.
17851785
17861786
Args:
17871787
job (Job): The job to schedule. Must have a name and either schedule or due_time.
1788+
overwrite (bool): If true, allows this job to overwrite an existing job with the same name.
17881789
17891790
Returns:
17901791
DaprResponse: Empty response indicating successful scheduling.
@@ -1806,7 +1807,7 @@ def schedule_job_alpha1(self, job: Job) -> DaprResponse:
18061807

18071808
# Convert job to proto using the Job class private method
18081809
job_proto = job._get_proto()
1809-
request = api_v1.ScheduleJobRequest(job=job_proto)
1810+
request = api_v1.ScheduleJobRequest(job=job_proto, overwrite=overwrite)
18101811

18111812
try:
18121813
_, call = self.retry_policy.run_rpc(self._stub.ScheduleJobAlpha1.with_call, request)

dapr/proto/runtime/v1/dapr_pb2.py

Lines changed: 324 additions & 327 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

dapr/proto/runtime/v1/dapr_pb2.pyi

Lines changed: 10 additions & 27 deletions
Original file line numberDiff line numberDiff line change
@@ -23,6 +23,7 @@ import google.protobuf.descriptor
2323
import google.protobuf.internal.containers
2424
import google.protobuf.internal.enum_type_wrapper
2525
import google.protobuf.message
26+
import google.protobuf.struct_pb2
2627
import google.protobuf.timestamp_pb2
2728
import sys
2829
import typing
@@ -3166,7 +3167,6 @@ class Job(google.protobuf.message.Message):
31663167
DUE_TIME_FIELD_NUMBER: builtins.int
31673168
TTL_FIELD_NUMBER: builtins.int
31683169
DATA_FIELD_NUMBER: builtins.int
3169-
OVERWRITE_FIELD_NUMBER: builtins.int
31703170
FAILURE_POLICY_FIELD_NUMBER: builtins.int
31713171
name: builtins.str
31723172
"""The unique name for the job."""
@@ -3207,8 +3207,6 @@ class Job(google.protobuf.message.Message):
32073207
"point in time" string in the format of RFC3339, Go duration string
32083208
(calculated from job creation time), or non-repeating ISO8601.
32093209
"""
3210-
overwrite: builtins.bool
3211-
"""If true, allows this job to overwrite an existing job with the same name."""
32123210
@property
32133211
def data(self) -> google.protobuf.any_pb2.Any:
32143212
"""payload is the serialized job payload that will be sent to the recipient
@@ -3228,11 +3226,10 @@ class Job(google.protobuf.message.Message):
32283226
due_time: builtins.str | None = ...,
32293227
ttl: builtins.str | None = ...,
32303228
data: google.protobuf.any_pb2.Any | None = ...,
3231-
overwrite: builtins.bool = ...,
32323229
failure_policy: dapr.proto.common.v1.common_pb2.JobFailurePolicy | None = ...,
32333230
) -> None: ...
32343231
def HasField(self, field_name: typing.Literal["_due_time", b"_due_time", "_failure_policy", b"_failure_policy", "_repeats", b"_repeats", "_schedule", b"_schedule", "_ttl", b"_ttl", "data", b"data", "due_time", b"due_time", "failure_policy", b"failure_policy", "repeats", b"repeats", "schedule", b"schedule", "ttl", b"ttl"]) -> builtins.bool: ...
3235-
def ClearField(self, field_name: typing.Literal["_due_time", b"_due_time", "_failure_policy", b"_failure_policy", "_repeats", b"_repeats", "_schedule", b"_schedule", "_ttl", b"_ttl", "data", b"data", "due_time", b"due_time", "failure_policy", b"failure_policy", "name", b"name", "overwrite", b"overwrite", "repeats", b"repeats", "schedule", b"schedule", "ttl", b"ttl"]) -> None: ...
3232+
def ClearField(self, field_name: typing.Literal["_due_time", b"_due_time", "_failure_policy", b"_failure_policy", "_repeats", b"_repeats", "_schedule", b"_schedule", "_ttl", b"_ttl", "data", b"data", "due_time", b"due_time", "failure_policy", b"failure_policy", "name", b"name", "repeats", b"repeats", "schedule", b"schedule", "ttl", b"ttl"]) -> None: ...
32363233
@typing.overload
32373234
def WhichOneof(self, oneof_group: typing.Literal["_due_time", b"_due_time"]) -> typing.Literal["due_time"] | None: ...
32383235
@typing.overload
@@ -3253,6 +3250,9 @@ class ScheduleJobRequest(google.protobuf.message.Message):
32533250
DESCRIPTOR: google.protobuf.descriptor.Descriptor
32543251

32553252
JOB_FIELD_NUMBER: builtins.int
3253+
OVERWRITE_FIELD_NUMBER: builtins.int
3254+
overwrite: builtins.bool
3255+
"""If true, allows this job to overwrite an existing job with the same name."""
32563256
@property
32573257
def job(self) -> global___Job:
32583258
"""The job details."""
@@ -3261,9 +3261,10 @@ class ScheduleJobRequest(google.protobuf.message.Message):
32613261
self,
32623262
*,
32633263
job: global___Job | None = ...,
3264+
overwrite: builtins.bool = ...,
32643265
) -> None: ...
32653266
def HasField(self, field_name: typing.Literal["job", b"job"]) -> builtins.bool: ...
3266-
def ClearField(self, field_name: typing.Literal["job", b"job"]) -> None: ...
3267+
def ClearField(self, field_name: typing.Literal["job", b"job", "overwrite", b"overwrite"]) -> None: ...
32673268

32683269
global___ScheduleJobRequest = ScheduleJobRequest
32693270

@@ -4103,24 +4104,6 @@ class ConversationToolsFunction(google.protobuf.message.Message):
41034104

41044105
DESCRIPTOR: google.protobuf.descriptor.Descriptor
41054106

4106-
@typing.final
4107-
class ParametersEntry(google.protobuf.message.Message):
4108-
DESCRIPTOR: google.protobuf.descriptor.Descriptor
4109-
4110-
KEY_FIELD_NUMBER: builtins.int
4111-
VALUE_FIELD_NUMBER: builtins.int
4112-
key: builtins.str
4113-
@property
4114-
def value(self) -> google.protobuf.any_pb2.Any: ...
4115-
def __init__(
4116-
self,
4117-
*,
4118-
key: builtins.str = ...,
4119-
value: google.protobuf.any_pb2.Any | None = ...,
4120-
) -> None: ...
4121-
def HasField(self, field_name: typing.Literal["value", b"value"]) -> builtins.bool: ...
4122-
def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ...
4123-
41244107
NAME_FIELD_NUMBER: builtins.int
41254108
DESCRIPTION_FIELD_NUMBER: builtins.int
41264109
PARAMETERS_FIELD_NUMBER: builtins.int
@@ -4131,7 +4114,7 @@ class ConversationToolsFunction(google.protobuf.message.Message):
41314114
used by the model to choose when and how to call the function.
41324115
"""
41334116
@property
4134-
def parameters(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, google.protobuf.any_pb2.Any]:
4117+
def parameters(self) -> google.protobuf.struct_pb2.Struct:
41354118
"""The parameters the functions accepts, described as a JSON Schema object.
41364119
See the [guide](https://platform.openai.com/docs/guides/function-calling) for examples,
41374120
and the [JSON Schema reference](https://json-schema.org/understanding-json-schema/) for documentation about the format.
@@ -4143,9 +4126,9 @@ class ConversationToolsFunction(google.protobuf.message.Message):
41434126
*,
41444127
name: builtins.str = ...,
41454128
description: builtins.str | None = ...,
4146-
parameters: collections.abc.Mapping[builtins.str, google.protobuf.any_pb2.Any] | None = ...,
4129+
parameters: google.protobuf.struct_pb2.Struct | None = ...,
41474130
) -> None: ...
4148-
def HasField(self, field_name: typing.Literal["_description", b"_description", "description", b"description"]) -> builtins.bool: ...
4131+
def HasField(self, field_name: typing.Literal["_description", b"_description", "description", b"description", "parameters", b"parameters"]) -> builtins.bool: ...
41494132
def ClearField(self, field_name: typing.Literal["_description", b"_description", "description", b"description", "name", b"name", "parameters", b"parameters"]) -> None: ...
41504133
def WhichOneof(self, oneof_group: typing.Literal["_description", b"_description"]) -> typing.Literal["description"] | None: ...
41514134

examples/jobs/job_management.py

Lines changed: 5 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -16,10 +16,10 @@ def main():
1616
with DaprClient() as client:
1717
# Example 0: Simple job without data (works without protobuf)
1818
print('0. Scheduling a simple job without data...', flush=True)
19-
simple_job = Job(name='simple-job', schedule='@every 30s', overwrite=True)
19+
simple_job = Job(name='simple-job', schedule='@every 30s')
2020

2121
try:
22-
client.schedule_job_alpha1(simple_job)
22+
client.schedule_job_alpha1(job=simple_job, overwrite=True)
2323
print(f'✓ Simple job scheduled successfully', flush=True)
2424
except Exception as e:
2525
print(f'✗ Failed to schedule simple job: {e}', flush=True)
@@ -33,11 +33,10 @@ def main():
3333
schedule='@every 30s',
3434
data=job_data,
3535
ttl='5m',
36-
overwrite=True,
3736
)
3837

3938
try:
40-
client.schedule_job_alpha1(recurring_job)
39+
client.schedule_job_alpha1(job=recurring_job, overwrite=True)
4140
print(f'✓ Recurring job scheduled successfully', flush=True)
4241
except Exception as e:
4342
print(f'✗ Failed to schedule recurring job: {e}', flush=True)
@@ -68,11 +67,10 @@ def main():
6867
schedule='@every 45s',
6968
data=create_job_data('Job with drop failure policy'),
7069
failure_policy=DropFailurePolicy(),
71-
overwrite=True,
7270
)
7371

7472
try:
75-
client.schedule_job_alpha1(drop_policy_job)
73+
client.schedule_job_alpha1(job=drop_policy_job, overwrite=True)
7674
print(f'✓ Job with drop failure policy scheduled successfully', flush=True)
7775
except Exception as e:
7876
print(f'✗ Failed to schedule job with drop policy: {e}', flush=True)
@@ -83,11 +81,10 @@ def main():
8381
schedule='@every 60s',
8482
data=create_job_data('Job with constant retry policy'),
8583
failure_policy=ConstantFailurePolicy(max_retries=3, interval_seconds=10),
86-
overwrite=True,
8784
)
8885

8986
try:
90-
client.schedule_job_alpha1(constant_policy_job)
87+
client.schedule_job_alpha1(job=constant_policy_job, overwrite=True)
9188
print(f'✓ Job with constant retry policy scheduled successfully', flush=True)
9289
except Exception as e:
9390
print(f'✗ Failed to schedule job with retry policy: {e}', flush=True)

tests/clients/fake_dapr_server.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -55,6 +55,7 @@ def __init__(self, grpc_port: int = 50001, http_port: int = 8080):
5555
self.workflow_options: Dict[str, str] = {}
5656
self.metadata: Dict[str, str] = {}
5757
self.jobs: Dict[str, api_v1.Job] = {}
58+
self.job_overwrites: Dict[str, bool] = {}
5859
self._next_exception = None
5960

6061
def start(self):
@@ -550,6 +551,7 @@ def ScheduleJobAlpha1(self, request, context):
550551

551552
# Store the job
552553
self.jobs[request.job.name] = request.job
554+
self.job_overwrites[request.job.name] = request.overwrite
553555

554556
return empty_pb2.Empty()
555557

tests/clients/test_dapr_grpc_client.py

Lines changed: 25 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1253,9 +1253,31 @@ def test_schedule_job_alpha1_success(self):
12531253
# Verify job was stored in fake server
12541254
self.assertIn('test-job', self._fake_dapr_server.jobs)
12551255
stored_job = self._fake_dapr_server.jobs['test-job']
1256+
stored_job_overwrite = self._fake_dapr_server.job_overwrites['test-job']
12561257
self.assertEqual(stored_job.name, 'test-job')
12571258
self.assertEqual(stored_job.schedule, '@every 1m')
1258-
self.assertEqual(stored_job.overwrite, False)
1259+
self.assertEqual(stored_job_overwrite, False)
1260+
# Verify data field is always set (even if empty)
1261+
self.assertTrue(stored_job.HasField('data'))
1262+
1263+
def test_schedule_job_alpha1_success_with_overwrite(self):
1264+
"""Test successful job scheduling."""
1265+
dapr = DaprGrpcClient(f'{self.scheme}localhost:{self.grpc_port}')
1266+
job = Job(name='test-job', schedule='@every 1m')
1267+
1268+
# Schedule the job
1269+
response = dapr.schedule_job_alpha1(job=job, overwrite=True)
1270+
1271+
# Verify response type
1272+
self.assertIsInstance(response, DaprResponse)
1273+
1274+
# Verify job was stored in fake server
1275+
self.assertIn('test-job', self._fake_dapr_server.jobs)
1276+
stored_job = self._fake_dapr_server.jobs['test-job']
1277+
stored_job_overwrite = self._fake_dapr_server.job_overwrites['test-job']
1278+
self.assertEqual(stored_job.name, 'test-job')
1279+
self.assertEqual(stored_job.schedule, '@every 1m')
1280+
self.assertEqual(stored_job_overwrite, True)
12591281
# Verify data field is always set (even if empty)
12601282
self.assertTrue(stored_job.HasField('data'))
12611283

@@ -1280,12 +1302,12 @@ def test_schedule_job_alpha1_success_with_data(self):
12801302
# Verify job was stored in fake server with all data
12811303
self.assertIn('test-job-with-data', self._fake_dapr_server.jobs)
12821304
stored_job = self._fake_dapr_server.jobs['test-job-with-data']
1305+
stored_job_overwrite = self._fake_dapr_server.job_overwrites['test-job-with-data']
12831306
self.assertEqual(stored_job.name, 'test-job-with-data')
12841307
self.assertEqual(stored_job.schedule, '@every 2m')
12851308
self.assertEqual(stored_job.repeats, 3)
12861309
self.assertEqual(stored_job.ttl, '10m')
1287-
self.assertEqual(stored_job.overwrite, False)
1288-
1310+
self.assertEqual(stored_job_overwrite, False)
12891311
# Verify data field contains the payload
12901312
self.assertTrue(stored_job.HasField('data'))
12911313
self.assertEqual(
@@ -1323,7 +1345,6 @@ def test_get_job_alpha1_success(self):
13231345
self.assertEqual(retrieved_job.schedule, '@every 1m')
13241346
self.assertEqual(retrieved_job.repeats, 5)
13251347
self.assertEqual(retrieved_job.ttl, '1h')
1326-
self.assertEqual(retrieved_job.overwrite, False)
13271348

13281349
def test_get_job_alpha1_validation_error(self):
13291350
"""Test validation error in job retrieval."""

tests/clients/test_dapr_grpc_client_async.py

Lines changed: 28 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -1184,9 +1184,33 @@ async def test_schedule_job_alpha1_success(self):
11841184
# Verify job was stored in fake server
11851185
self.assertIn('async-test-job', self._fake_dapr_server.jobs)
11861186
stored_job = self._fake_dapr_server.jobs['async-test-job']
1187+
stored_job_overwrite = self._fake_dapr_server.job_overwrites['async-test-job']
11871188
self.assertEqual(stored_job.name, 'async-test-job')
11881189
self.assertEqual(stored_job.schedule, '@every 1m')
1189-
self.assertEqual(stored_job.overwrite, False)
1190+
self.assertEqual(stored_job_overwrite, False)
1191+
# Verify data field is always set (even if empty)
1192+
self.assertTrue(stored_job.HasField('data'))
1193+
1194+
await dapr.close()
1195+
1196+
async def test_schedule_job_alpha1_success_with_overwrite(self):
1197+
"""Test successful async job scheduling."""
1198+
dapr = DaprGrpcClientAsync(f'{self.scheme}localhost:{self.grpc_port}')
1199+
job = Job(name='async-test-job', schedule='@every 1m')
1200+
1201+
# Schedule the job with overwrite
1202+
response = await dapr.schedule_job_alpha1(job=job, overwrite=True)
1203+
1204+
# Verify response type
1205+
self.assertIsInstance(response, DaprResponse)
1206+
1207+
# Verify job was stored in fake server
1208+
self.assertIn('async-test-job', self._fake_dapr_server.jobs)
1209+
stored_job = self._fake_dapr_server.jobs['async-test-job']
1210+
stored_job_overwrite = self._fake_dapr_server.job_overwrites['async-test-job']
1211+
self.assertEqual(stored_job.name, 'async-test-job')
1212+
self.assertEqual(stored_job.schedule, '@every 1m')
1213+
self.assertEqual(stored_job_overwrite, True)
11901214
# Verify data field is always set (even if empty)
11911215
self.assertTrue(stored_job.HasField('data'))
11921216

@@ -1215,11 +1239,12 @@ async def test_schedule_job_alpha1_success_with_data(self):
12151239
# Verify job was stored in fake server with all data
12161240
self.assertIn('async-test-job-with-data', self._fake_dapr_server.jobs)
12171241
stored_job = self._fake_dapr_server.jobs['async-test-job-with-data']
1242+
stored_job_overwrite = self._fake_dapr_server.job_overwrites['async-test-job-with-data']
12181243
self.assertEqual(stored_job.name, 'async-test-job-with-data')
12191244
self.assertEqual(stored_job.schedule, '@every 2m')
12201245
self.assertEqual(stored_job.repeats, 3)
12211246
self.assertEqual(stored_job.ttl, '10m')
1222-
self.assertEqual(stored_job.overwrite, False)
1247+
self.assertEqual(stored_job_overwrite, False)
12231248

12241249
# Verify data field contains the payload
12251250
self.assertTrue(stored_job.HasField('data'))
@@ -1279,7 +1304,6 @@ async def test_get_job_alpha1_success(self):
12791304
self.assertEqual(retrieved_job.schedule, '@every 1m')
12801305
self.assertEqual(retrieved_job.repeats, 5)
12811306
self.assertEqual(retrieved_job.ttl, '1h')
1282-
self.assertEqual(retrieved_job.overwrite, False)
12831307

12841308
await dapr.close()
12851309

@@ -1353,11 +1377,10 @@ async def test_job_lifecycle(self):
13531377
data=data,
13541378
repeats=10,
13551379
ttl='30m',
1356-
overwrite=True,
13571380
)
13581381

13591382
# 1. Schedule the job
1360-
schedule_response = await dapr.schedule_job_alpha1(job)
1383+
schedule_response = await dapr.schedule_job_alpha1(job=job, overwrite=True)
13611384
self.assertIsInstance(schedule_response, DaprResponse)
13621385

13631386
# 2. Get the job and verify all fields
@@ -1366,7 +1389,6 @@ async def test_job_lifecycle(self):
13661389
self.assertEqual(retrieved_job.schedule, '@every 5m')
13671390
self.assertEqual(retrieved_job.repeats, 10)
13681391
self.assertEqual(retrieved_job.ttl, '30m')
1369-
self.assertTrue(retrieved_job.overwrite)
13701392
self.assertEqual(retrieved_job.data.value, b'{"lifecycle": "test"}')
13711393

13721394
# 3. Delete the job

0 commit comments

Comments (0)