The invalid_digest error occur
CompletedHello, I'm new to the Box sdk gen.
I'm trying to use the function "upload_file_part" to upload files, but I keep getting an "invalid_digest" error. I've tried many methods but still can't figure out the reason. I hope someone can provide some guidance.
Below is the error message :
Response:
Status code: 400
Headers:
{ 'Alt-Svc': 'h3=":443"; ma=2592000,h3-29=":443"; ma=2592000',
'Connection': 'close',
'Content-Length': '180',
'Content-Type': 'application/json;charset=utf-8',
'Date': 'Mon, 19 Aug 2024 01:12:38 GMT',
'Server': 'nginx',
'Strict-Transport-Security': 'max-age=31536000',
'Via': '1.1 google'}
Code: invalid_digest
Context Info:
{}
Request Id: 29247440dae0e14ba77c63ebf6ed659a
Help Url: None
Body:
{ 'code': 'invalid_digest',
'message': 'Digest header invalid: '
"b'\\x108u\\xf0\\x98|\\xd0o\\x10\\xec\\xe7\\xa5~\\xed1\\x81P\\xc7\\x13u'",
'request_id': '29247440dae0e14ba77c63ebf6ed659a'}
Raw body: {"code":"invalid_digest","message":"Digest header invalid: b'\\x108u\\xf0\\x98|\\xd0o\\x10\\xec\\xe7\\xa5~\\xed1\\x81P\\xc7\\x13u'","request_id":"29247440dae0e14ba77c63ebf6ed659a"}
file_path = 'D:/file-20MB.bin'
file_size = os.path.getsize(file_path)
# file_name = 'file-100MB.bin'
file_name = 'file-20MB.bin'
folder = client.folders.get_folder_by_id(folder_id)
file_id = None
# Manual upload
# Preflight check
try:
pre_flight_arg = PreflightFileUploadCheckParent(id=folder.id)
client.uploads.preflight_file_upload_check(name=file_name,
size=file_size,
parent=pre_flight_arg)
except BoxAPIError as err:
print('Error code :', err.response_info.body.get("code", None))
if err.response_info.body.get("code", None) == "item_name_in_use":
file_id = err.response_info.body["context_info"]["conflicts"]["id"]
else:
raise err
print('file_id :', file_id)
# Chucked upload
upload_arg = UploadFileAttributes(file_name, UploadFileAttributesParentField(folder.id))
if file_id is None:
# upload new file
upload_session = client.chunked_uploads.create_file_upload_session(folder.id, file_size, file_name)
# file = files.entries[0]
else:
# upload new version
upload_session = client.chunked_uploads.create_file_upload_session_for_existing_file(file_id, file_size)
# file = files.entries[0]
print('upload_session :', upload_session)
file_sha1 = hashlib.sha1()
parts = []
with open(file_path, "rb") as file_stream:
for part_num in range(upload_session.total_parts):
copied_length = 0
chunk = b""
while copied_length < upload_session.part_size:
bytes_read = file_stream.read(upload_session.part_size - copied_length)
if bytes_read is None:
# stream returns none when no bytes are ready currently
# but there are potentially more bytes in the stream
# to be read.
continue
if len(bytes_read) == 0:
# stream is exhausted.
break
chunk += bytes_read
copied_length += len(bytes_read)
content_range = (part_num * upload_session.part_size)
print('content_range :', content_range)
file_sha1.update(chunk)
print('file_sha1 :', file_sha1)
fileDigest = file_sha1.digest()
print('fileDigest :', fileDigest)
uploaded_part = client.chunked_uploads.upload_file_part(upload_session.id,
chunk,
fileDigest,
content_range)
parts.append(uploaded_part)
Thanks!
San
Post is closed for comments.
Comments
1 comment