Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
33 changes: 16 additions & 17 deletions b2sdk/_internal/transfer/outbound/upload_manager.py
Original file line number Diff line number Diff line change
Expand Up @@ -204,18 +204,17 @@ def _upload_small_file(
content_length = upload_source.get_content_length()
exception_info_list = []
progress_listener.set_total_bytes(content_length)
for _ in range(self.MAX_UPLOAD_ATTEMPTS):
try:
with upload_source.open() as file:
input_stream = ReadingStreamWithProgress(
file, progress_listener, length=content_length
)
if upload_source.is_sha1_known():
content_sha1 = upload_source.get_content_sha1()
else:
input_stream = StreamWithHash(input_stream, stream_length=content_length)
content_sha1 = HEX_DIGITS_AT_END
# it is important that `len()` works on `input_stream`
with upload_source.open() as file:
input_stream = ReadingStreamWithProgress(file, progress_listener, length=content_length)
if upload_source.is_sha1_known():
content_sha1 = upload_source.get_content_sha1()
else:
input_stream = StreamWithHash(input_stream, stream_length=content_length)
content_sha1 = HEX_DIGITS_AT_END
# it is important that `len()` works on `input_stream`

for _ in range(self.MAX_UPLOAD_ATTEMPTS):
try:
response = self.services.session.upload_file(
bucket_id,
file_name,
Expand All @@ -236,10 +235,10 @@ def _upload_small_file(
), '{} != {}'.format(content_sha1, response['contentSha1'])
return self.services.api.file_version_factory.from_api_response(response)

except B2Error as e:
if not e.should_retry_upload():
raise
exception_info_list.append(e)
self.account_info.clear_bucket_upload_data(bucket_id)
except B2Error as e:
if not e.should_retry_upload():
raise
exception_info_list.append(e)
self.account_info.clear_bucket_upload_data(bucket_id)

raise MaxRetriesExceeded(self.MAX_UPLOAD_ATTEMPTS, exception_info_list)
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
Fixed a retry bug in `upload_unbound_stream()` small-file uploads where a retryable upload error could cause a one-shot buffered stream to be reopened after it was closed, raising `ValueError: I/O operation on closed file`.
5 changes: 5 additions & 0 deletions test/unit/bucket/test_bucket.py
Original file line number Diff line number Diff line change
Expand Up @@ -1918,6 +1918,11 @@ def test_upload_one_retryable_error(self):
data = b'hello world'
self.bucket.upload_bytes(data, 'file1')

def test_upload_unbound_stream_one_retryable_error(self):
    """Regression test: a retryable error during an unbound-stream small-file
    upload must be retried successfully.

    Previously the retry loop reopened the upload source on each attempt;
    for a one-shot buffered stream (as produced by ``upload_unbound_stream``)
    the second open hit an already-closed buffer and raised
    ``ValueError: I/O operation on closed file``. The stream is now opened
    once, outside the retry loop, so a single retryable failure followed by
    a retry completes without error.
    """
    # Queue exactly one retryable failure in the simulator; the first
    # upload attempt fails, the second must succeed.
    self.simulator.set_upload_errors([CanRetry(True)])
    data = b'hello world'
    self.bucket.upload_unbound_stream(io.BytesIO(data), 'file1')

def test_upload_timeout(self):
self.simulator.set_upload_errors([B2RequestTimeoutDuringUpload()])
data = b'hello world'
Expand Down