Skip to content

Commit 8fbb37d

Browse files
authored
revert excess retries (#3)
1 parent fc450cb commit 8fbb37d

File tree

3 files changed

+17
-84
lines changed

3 files changed

+17
-84
lines changed

dataflux_core/download.py

Lines changed: 14 additions & 36 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
11
"""
2-
Copyright 2023 Google LLC
2+
Copyright 2024 Google LLC
33
44
Licensed under the Apache License, Version 2.0 (the "License");
55
you may not use this file except in compliance with the License.
@@ -62,27 +62,16 @@ def compose(
6262

6363
if storage_client is None:
6464
storage_client = storage.Client(project=project_name)
65-
retries = 3
66-
while True:
67-
try:
68-
bucket = storage_client.bucket(bucket_name)
69-
destination = bucket.blob(destination_blob_name)
70-
71-
sources = list()
72-
for each_object in objects:
73-
blob_name = each_object[0]
74-
sources.append(bucket.blob(blob_name))
75-
76-
destination.compose(sources, retry=MODIFIED_RETRY)
77-
break
78-
except Exception as e:
79-
retries -= 1
80-
logging.error(
81-
f"compose encountered error ({retries} retries left): {str(e)}"
82-
)
83-
if retries <= 0:
84-
raise RuntimeError(f"compose is out of retries; exiting: {e}")
85-
continue
65+
66+
bucket = storage_client.bucket(bucket_name)
67+
destination = bucket.blob(destination_blob_name)
68+
69+
sources = list()
70+
for each_object in objects:
71+
blob_name = each_object[0]
72+
sources.append(bucket.blob(blob_name))
73+
74+
destination.compose(sources, retry=MODIFIED_RETRY)
8675

8776
return destination
8877

@@ -146,20 +135,9 @@ def download_single(
146135
Returns:
147136
the contents of the object in bytes.
148137
"""
149-
retries = 3
150-
while True:
151-
try:
152-
bucket_handle = storage_client.bucket(bucket_name)
153-
blob = bucket_handle.blob(object_name)
154-
return blob.download_as_bytes(retry=MODIFIED_RETRY)
155-
except Exception as e:
156-
retries -= 1
157-
logging.error(
158-
f"download_single encountered error ({retries} retries left): {str(e)}"
159-
)
160-
if retries <= 0:
161-
raise RuntimeError(f"download_single is out of retries; exiting: {e}")
162-
continue
138+
bucket_handle = storage_client.bucket(bucket_name)
139+
blob = bucket_handle.blob(object_name)
140+
return blob.download_as_bytes(retry=MODIFIED_RETRY)
163141

164142

165143
class DataFluxDownloadOptimizationParams:

dataflux_core/tests/fake_gcs.py

Lines changed: 2 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
11
"""
2-
Copyright 2023 Google LLC
2+
Copyright 2024 Google LLC
33
44
Licensed under the Apache License, Version 2.0 (the "License");
55
you may not use this file except in compliance with the License.
@@ -98,15 +98,10 @@ class Client(object):
9898
def __init__(self):
9999
self.buckets: dict[str, Bucket] = dict()
100100
self.content: dict[str, tuple[str, str]] = dict()
101-
# This can be set to indicate how many sequential errors to trigger before passing.
102-
self.error_count = 0
103101

104102
def bucket(self, name: str) -> Bucket:
105-
if self.error_count > 0:
106-
self.error_count -= 1
107-
raise Exception("Error")
108103
if name not in self.buckets:
109104
self.buckets[name] = Bucket(name)
110105
if name in self.content:
111106
self.buckets[name].content = self.content[name]
112-
return self.buckets[name]
107+
return self.buckets[name]

dataflux_core/tests/test_download.py

Lines changed: 1 addition & 41 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
11
"""
2-
Copyright 2023 Google LLC
2+
Copyright 2024 Google LLC
33
44
Licensed under the Apache License, Version 2.0 (the "License");
55
you may not use this file except in compliance with the License.
@@ -35,29 +35,6 @@ def test_compose(self):
3535
self.assertEqual(blob.name, destination_blob_name)
3636
self.assertEqual(blob.content, expected_result)
3737

38-
def test_compose_retries(self):
39-
bucket_name = "test_bucket"
40-
destination_blob_name = "dest_name"
41-
objects = [("one", 3), ("two", 3), ("three", 5)]
42-
client = fake_gcs.Client()
43-
bucket = client.bucket(bucket_name)
44-
bucket._add_file("one", bytes("one", "utf-8"))
45-
bucket._add_file("two", bytes("two", "utf-8"))
46-
bucket._add_file("three", bytes("three", "utf-8"))
47-
client.error_count = 2
48-
expected_result = b"onetwothree"
49-
blob = download.compose("", bucket_name, destination_blob_name, objects, client)
50-
self.assertEqual(blob.name, destination_blob_name)
51-
self.assertEqual(blob.content, expected_result)
52-
client.error_count = 3
53-
try:
54-
blob = download.compose(
55-
"", bucket_name, destination_blob_name, objects, client
56-
)
57-
self.fail(f"expected RuntimeError but got {blob}")
58-
except RuntimeError:
59-
pass
60-
6138
def test_decompose(self):
6239
bucket_name = "test_bucket"
6340
object_name = "test_obj"
@@ -78,23 +55,6 @@ def test_download_single(self):
7855
result = download.download_single(client, bucket_name, object_name)
7956
self.assertEqual(result, content)
8057

81-
def test_download_retry(self):
82-
client = fake_gcs.Client()
83-
bucket_name = "test_bucket"
84-
object_name = "test_obj"
85-
content = bytes("onetwothree", "utf-8")
86-
bucket = client.bucket(bucket_name)
87-
bucket._add_file(object_name, content)
88-
client.error_count = 2
89-
result = download.download_single(client, bucket_name, object_name)
90-
self.assertEqual(result, content)
91-
client.error_count = 3
92-
try:
93-
result = download.download_single(client, bucket_name, object_name)
94-
self.fail(f"expected RuntimeError but got {result}")
95-
except RuntimeError:
96-
pass
97-
9858
def test_dataflux_download(self):
9959
bucket_name = "test_bucket"
10060
objects = [("one", 3), ("two", 3), ("three", 5)]

0 commit comments

Comments (0)