Skip to content

Commit eb0fd78

Browse files
authored
Merge pull request #530 from microsoftgraph/shem/large_file_upload
Large File Upload Task
2 parents 1218c18 + 9754d1c commit eb0fd78

9 files changed

+450
-4
lines changed

.gitignore

+1
Original file line numberDiff line numberDiff line change
@@ -133,3 +133,4 @@ dmypy.json
133133
.idea/
134134

135135
app*.py
136+
app*

CHANGELOG.md

+8
Original file line numberDiff line numberDiff line change
@@ -5,6 +5,14 @@ All notable changes to this project will be documented in this file.
55
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
66
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
77

8+
## [1.0.1] - 2024-04-22
9+
10+
### Added
11+
12+
### Changed
13+
- Enabled Large File Upload and Page Iterator support
14+
15+
816
## [1.0.0] - 2023-10-31
917

1018
### Added

src/msgraph_core/_constants.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -8,5 +8,5 @@
88
"""
99
DEFAULT_REQUEST_TIMEOUT = 100
1010
DEFAULT_CONNECTION_TIMEOUT = 30
11-
SDK_VERSION = '1.0.0'
11+
SDK_VERSION = '1.0.1'
1212
MS_DEFAULT_SCOPE = 'https://graph.microsoft.com/.default'

src/msgraph_core/models/__init__.py

+3-1
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,5 @@
11
from .page_result import PageResult
2+
from .large_file_upload_session import LargeFileUploadSession
3+
from .upload_result import UploadResult, UploadSessionDataHolder
24

3-
__all__ = ['PageResult']
5+
__all__ = ['PageResult', 'LargeFileUploadSession', 'UploadResult', 'UploadSessionDataHolder']
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,66 @@
1+
from __future__ import annotations
2+
from typing import Any, Callable, Dict, List, Optional, TYPE_CHECKING, Union
3+
import datetime
4+
from dataclasses import dataclass, field
5+
6+
from kiota_abstractions.serialization import (
7+
AdditionalDataHolder, Parsable, ParseNode, SerializationWriter
8+
)
9+
10+
11+
@dataclass
class LargeFileUploadSession(AdditionalDataHolder, Parsable):
    """State of a large-file upload session.

    Mirrors the Graph uploadSession resource: where to PUT byte ranges,
    which ranges the service still expects, and when the session expires.
    """

    # Properties returned by the service that have no declared field here.
    additional_data: Dict[str, Any] = field(default_factory=dict)
    # When the session becomes invalid; presumably partial data is then
    # discarded by the service — confirm against the Graph docs.
    expiration_date_time: Optional[datetime.datetime] = None
    # Byte ranges (e.g. "0-") the service has not yet received.
    next_expected_ranges: Optional[List[str]] = None
    # Client-side cancellation flag; note it is not serialized below.
    is_cancelled: Optional[bool] = False
    odata_type: Optional[str] = None
    # The URL endpoint that accepts PUT requests for byte ranges of the file.
    upload_url: Optional[str] = None

    @staticmethod
    def create_from_discriminator_value(
        parse_node: Optional[ParseNode] = None
    ) -> LargeFileUploadSession:
        """Create a new instance of the appropriate class based on the
        discriminator value.

        param parse_node: The parse node used to read the discriminator
        value and create the object.
        Returns: LargeFileUploadSession
        """
        if not parse_node:
            raise TypeError("parse_node cannot be null.")
        return LargeFileUploadSession()

    def get_field_deserializers(self) -> Dict[str, Callable[[ParseNode], None]]:
        """Map wire property names to setters on this instance.

        Returns: Dict[str, Callable[[ParseNode], None]]
        """
        def read_expiration(node: Any) -> None:
            self.expiration_date_time = node.get_datetime_value()

        def read_ranges(node: Any) -> None:
            self.next_expected_ranges = node.get_collection_of_primitive_values(str)

        def read_odata_type(node: Any) -> None:
            self.odata_type = node.get_str_value()

        def read_upload_url(node: Any) -> None:
            self.upload_url = node.get_str_value()

        return {
            "expirationDateTime": read_expiration,
            "nextExpectedRanges": read_ranges,
            "@odata.type": read_odata_type,
            "uploadUrl": read_upload_url,
        }

    def serialize(self, writer: SerializationWriter) -> None:
        """Serialize information about the current object.

        param writer: Serialization writer to use to serialize this model
        Returns: None
        """
        if not writer:
            raise TypeError("writer cannot be null.")
        writer.write_datetime_value("expirationDateTime", self.expiration_date_time)
        writer.write_collection_of_primitive_values("nextExpectedRanges", self.next_expected_ranges)
        writer.write_str_value("@odata.type", self.odata_type)
        writer.write_str_value("uploadUrl", self.upload_url)
        writer.write_additional_data_value(self.additional_data)

src/msgraph_core/models/page_result.py

+1-2
Original file line numberDiff line numberDiff line change
@@ -10,14 +10,13 @@
1010
PageResult: Represents a page of items in a paged response.
1111
"""
1212
from __future__ import annotations
13-
from typing import List, Optional, Dict, Callable
13+
from typing import List, Optional, Dict, Callable, TypeVar
1414
from dataclasses import dataclass
1515

1616
from kiota_abstractions.serialization.parsable import Parsable
1717
from kiota_abstractions.serialization.serialization_writer \
1818
import SerializationWriter
1919
from kiota_abstractions.serialization.parse_node import ParseNode
20-
from typing import TypeVar, List, Optional
2120

2221
T = TypeVar('T')
2322

+62
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,62 @@
from typing import Any, Callable, Dict, List, Optional, TypeVar

from dataclasses import dataclass, field
from datetime import datetime

from kiota_abstractions.serialization import (
    AdditionalDataHolder, Parsable, ParseNode, SerializationWriter
)
10+
T = TypeVar('T')
11+
12+
13+
@dataclass
class UploadSessionDataHolder(AdditionalDataHolder, Parsable):
    """Holds the state of an upload session returned by the service.

    Carries the same wire properties as LargeFileUploadSession and is
    (de)serialized with the same property names.
    """

    # When the session becomes invalid — confirm discard semantics
    # against the Graph uploadSession documentation.
    expiration_date_time: Optional[datetime] = None
    # Byte ranges (e.g. "0-") the service has not yet received.
    next_expected_ranges: Optional[List[str]] = None
    # The URL endpoint that accepts PUT requests for byte ranges.
    upload_url: Optional[str] = None
    odata_type: Optional[str] = None
    # Fix: declared explicitly (as LargeFileUploadSession does) so that
    # serialize() can always read it; previously this dataclass never
    # initialized the attribute it serializes.
    additional_data: Dict[str, Any] = field(default_factory=dict)

    def get_field_deserializers(self) -> Dict[str, Callable[[ParseNode], None]]:
        """The deserialization information for the current model.

        Returns: Dict[str, Callable[[ParseNode], None]]
        """
        fields: Dict[str, Callable[[Any], None]] = {
            "expirationDateTime":
            lambda n: setattr(self, 'expiration_date_time', n.get_datetime_value()),
            "nextExpectedRanges":
            lambda n:
            setattr(self, 'next_expected_ranges', n.get_collection_of_primitive_values(str)),
            "@odata.type":
            lambda n: setattr(self, 'odata_type', n.get_str_value()),
            "uploadUrl":
            lambda n: setattr(self, 'upload_url', n.get_str_value()),
        }
        return fields

    def serialize(self, writer: SerializationWriter) -> None:
        """Serialize information about the current object.

        param writer: Serialization writer to use to serialize this model
        Returns: None
        """
        if not writer:
            raise TypeError("writer cannot be null.")
        writer.write_datetime_value("expirationDateTime", self.expiration_date_time)
        writer.write_collection_of_primitive_values("nextExpectedRanges", self.next_expected_ranges)
        writer.write_str_value("@odata.type", self.odata_type)
        writer.write_str_value("uploadUrl", self.upload_url)
        writer.write_additional_data_value(self.additional_data)
51+
52+
53+
class UploadResult:
    """Outcome of a large-file upload task.

    Collects the session state, the item deserialized from the final
    response (if any), and the Location header of the created resource.
    """

    def __init__(self):
        # State of the upload session, if one is in progress.
        self.upload_session: Optional[UploadSessionDataHolder] = None
        # Item deserialized from the final response body, if any.
        self.item_response: Optional[T] = None
        # Location header value returned on completion, if any.
        self.location: Optional[str] = None

    @property
    def upload_succeeded(self) -> bool:
        """True once the service returned either an item or a location."""
        return any(
            value is not None for value in (self.item_response, self.location)
        )

src/msgraph_core/tasks/__init__.py

+3
Original file line numberDiff line numberDiff line change
@@ -1 +1,4 @@
11
from .page_iterator import PageIterator
2+
from .large_file_upload import LargeFileUploadTask
3+
4+
__all__ = ['PageIterator', 'LargeFileUploadTask']

0 commit comments

Comments
 (0)