@@ -271,9 +271,11 @@ async def _generate_sample(
         meta_prompt_nonce = f'{prompt_idx}'
 
         async with async_semaphore:
+            output = None
             if cache is not None:
                 output = await cache.get(meta_prompt, nonce=meta_prompt_nonce)
-            else:
+
+            if output is None:  # Cache miss or no cache - make API call
                 response = await client.chat.completions.create(
                     model=model_name,
                     messages=[{'role': 'user', 'content': meta_prompt}],
@@ -306,9 +308,11 @@ async def _generate_sample(
         task_prompt = task_prompt1 + task_prompt2
 
         async with async_semaphore:
+            output = None
             if cache is not None:
                 output = await cache.get(task_prompt1 + task_prompt2)
-            else:
+
+            if output is None:  # Cache miss or no cache - make API calls
                 futures = []
                 for response_prompt in [task_prompt1, task_prompt2]:
                     coro = client.chat.completions.create(
@@ -346,9 +350,11 @@ async def _generate_sample(
         else:
             task_prompt = response_mapper.generate_prompt(task, prompt)
         async with async_semaphore:
+            output = None
             if cache is not None:
                 output = await cache.get(task_prompt)
-            else:
+
+            if output is None:  # Cache miss or no cache - make API call
                 response = await client.chat.completions.create(
                     model=model_name,
                     messages=[{'role': 'user', 'content': task_prompt}],
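All three hunks apply the same fix: before this change, when a cache was configured but cache.get() missed (returned None), the else branch was skipped, no API call was made, and output stayed unset. Initializing output = None before the lookup and gating the call on if output is None covers both the no-cache and cache-miss cases in one code path. Below is a minimal standalone sketch of that pattern, assuming (as the diff suggests) that cache.get() returns None on a miss; InMemoryCache, fake_api_call, and generate are hypothetical stand-ins for the repository's cache backend and client.chat.completions.create(), not its actual classes:

import asyncio

class InMemoryCache:
    """Toy async cache; get() returns None on a miss."""
    def __init__(self):
        self._store = {}

    async def get(self, key):
        return self._store.get(key)

    async def set(self, key, value):
        self._store[key] = value

async def fake_api_call(prompt):
    await asyncio.sleep(0)  # stand-in for a network round trip
    return f'response to: {prompt}'

async def generate(prompt, cache=None):
    output = None
    if cache is not None:
        output = await cache.get(prompt)  # may return None on a miss

    if output is None:  # cache miss OR no cache - fall through to the API
        output = await fake_api_call(prompt)
        if cache is not None:
            # Assumed: the real code stores results back somewhere similar.
            await cache.set(prompt, output)
    return output

async def main():
    cache = InMemoryCache()
    print(await generate('hello', cache))  # miss -> API call
    print(await generate('hello', cache))  # hit -> served from cache

asyncio.run(main())

Initializing output before the lookup, rather than nesting the API call in an else, keeps a single call site, so a cache hit, a cache miss, and the no-cache case all converge on the same if output is None check.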