Skip to content

Commit 9831fa5

Browse files
committed
fixed response type for drive item
1 parent 9754d1c commit 9831fa5

File tree

1 file changed

+50
-27
lines changed

1 file changed

+50
-27
lines changed

src/msgraph_core/tasks/large_file_upload.py

Lines changed: 50 additions & 27 deletions
Original file line numberDiff line numberDiff line change
@@ -4,13 +4,16 @@
44
from asyncio import Future
55
from datetime import datetime, timedelta, timezone
66
import logging
7+
import httpx
78

89
from kiota_abstractions.serialization.parsable import Parsable
910
from kiota_abstractions.method import Method
1011
from kiota_abstractions.headers_collection import HeadersCollection
1112
from kiota_abstractions.request_information import RequestInformation
13+
from kiota_abstractions.native_response_handler import NativeResponseHandler
1214
from kiota_abstractions.serialization.additional_data_holder import AdditionalDataHolder
1315
from kiota_abstractions.serialization.parsable_factory import ParsableFactory
16+
from msgraph.generated.models.attachment_item import AttachmentItem
1417

1518
from kiota_abstractions.request_adapter import RequestAdapter
1619

@@ -25,7 +28,7 @@ def __init__(
2528
request_adapter: RequestAdapter,
2629
stream: BytesIO,
2730
parsable_factory: Optional[ParsableFactory] = None,
28-
max_chunk_size: int = 5 * 1024 * 1024
31+
max_chunk_size: int = 3 * 1024 * 1024 #5 - 409600
2932
):
3033
self._upload_session = upload_session
3134
self._request_adapter = request_adapter
@@ -101,40 +104,45 @@ async def upload(self, after_chunk_upload: Optional[Callable] = None):
101104
end = min(int(range_parts[0]) + self.max_chunk_size - 1, self.file_size)
102105
uploaded_range = [range_parts[0], end]
103106
response = None
107+
upload_result = UploadResult()
104108

105109
while self.chunks >= 0:
106110
session = process_next
107-
print(f"Chunks for upload : {self.chunks}")
108111
if self.chunks == 0:
109112
# last chunk
110-
print(f"Last chunk: {self.chunks} upload stated")
111113
response = await self.last_chunk(self.stream)
112-
print("Last chunk response: received")
113-
114-
try:
115-
lfu_session: LargeFileUploadSession = session # type: ignore
116-
if lfu_session is None:
117-
continue
118-
next_range = lfu_session.next_expected_ranges
119-
old_url = self.get_validated_upload_url(self.upload_session)
120-
lfu_session.upload_url = old_url
121-
if self.on_chunk_upload_complete is not None:
122-
self.on_chunk_upload_complete(uploaded_range)
123-
if not next_range:
124-
continue
125-
range_parts = str(next_range[0]).split("-")
126-
end = min(int(range_parts[0]) + self.max_chunk_size, self.file_size)
127-
uploaded_range = [range_parts[0], end]
128-
self.next_range = next_range[0] + "-"
129-
process_next = await self.next_chunk(self.stream)
130-
131-
except Exception as error:
132-
logging.error("Error uploading chunk %s", error)
133-
finally:
134-
self.chunks -= 1
114+
# upload_result.location = response.headers[
115+
# 'Location'] # to be fixed for attachment item Response
116+
117+
if isinstance(session, LargeFileUploadSession):
118+
try:
119+
lfu_session: LargeFileUploadSession = session # type: ignore
120+
if lfu_session is None:
121+
continue
122+
next_range = lfu_session.next_expected_ranges
123+
old_url = self.get_validated_upload_url(self.upload_session)
124+
lfu_session.upload_url = old_url
125+
if self.on_chunk_upload_complete is not None:
126+
self.on_chunk_upload_complete(uploaded_range)
127+
if not next_range:
128+
continue
129+
range_parts = str(next_range[0]).split("-")
130+
end = min(int(range_parts[0]) + self.max_chunk_size, self.file_size)
131+
uploaded_range = [range_parts[0], end]
132+
self.next_range = next_range[0] + "-"
133+
process_next = await self.next_chunk(self.stream)
134+
135+
except Exception as error:
136+
logging.error("Error uploading chunk %s", error)
137+
finally:
138+
self.chunks -= 1
139+
else:
140+
response = session
141+
logging.info(f"Response headers: {response.headers}")
142+
logging.info(f"Response content: {response.content}")
143+
break
135144
upload_result = UploadResult()
136145
upload_result.item_response = response
137-
upload_result.location = self.upload_session.upload_url
138146
return upload_result
139147

140148
@property
@@ -177,6 +185,13 @@ async def next_chunk(self, file: BytesIO, range_start: int = 0, range_end: int =
177185
info.headers.try_add("Content-Type", "application/octet-stream")
178186
info.set_stream_content(bytes(chunk_data))
179187
error_map: Dict[str, int] = {}
188+
if self.factory is AttachmentItem:
189+
headers = {key: ', '.join(value) for key, value in info.headers.get_all().items()}
190+
async with httpx.AsyncClient() as client:
191+
response = await client.put(
192+
self.upload_session.upload_url, headers=headers, data=info.content
193+
)
194+
return response
180195
parsable_factory = LargeFileUploadSession
181196
return await self.request_adapter.send_async(info, parsable_factory, error_map)
182197

@@ -218,6 +233,14 @@ async def last_chunk(
218233
info.headers.try_add("Content-Type", "application/octet-stream")
219234
info.set_stream_content(bytes(chunk_data))
220235
error_map: Dict[str, int] = {}
236+
if self.factory is AttachmentItem:
237+
headers = {key: ', '.join(value) for key, value in info.headers.get_all().items()}
238+
239+
async with httpx.AsyncClient() as client:
240+
response = await client.put(
241+
self.upload_session.upload_url, headers=headers, data=info.content
242+
)
243+
return response
221244
parsable_factory = self.factory or parsable_factory
222245
return await self.request_adapter.send_async(info, parsable_factory, error_map)
223246

0 commit comments

Comments
 (0)