@@ -331,7 +331,6 @@ async def fix_style_task(event):
         event.data["issue"]["comments_url"], {}, data={"body": message}
     )
 
-
 async def find_latest_pipeline(url, headers, session):
     async with session.get(url, headers=headers) as response:
         pipeline_objects = await response.json()
@@ -394,7 +393,7 @@ async def download_spack_lock_files(url, headers, download_dir, session):
 
     return folder_list
 
-async def copy_pr_binaries(pr_number, pr_branch, shared_pr_mirror_url):
+async def copy_pr_binaries(stack, pr_number, pr_branch, shared_pr_mirror_url):
     """Find the latest gitlab pipeline for the PR, get the spack.lock
     for each child pipeline, and for each one, activate the environment
     and issue the spack buildcache sync command to copy between the
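The sync step the docstring describes amounts to activating the environment built from each child pipeline's spack.lock and running `spack buildcache sync`. A minimal sketch, assuming placeholder names for the environment directory and the two mirror URLs (the real values come from the surrounding module):

# All names below are placeholders for illustration only.
env_dir = "/tmp/pr-binaries/example-stack"       # directory holding the downloaded spack.lock
source_mirror_url = "s3://example-source-mirror"
destination_mirror_url = "s3://example-destination-mirror"

await helpers.run_in_subprocess([
    "spack", "-e", env_dir,                      # activate the per-stack environment
    "buildcache", "sync",
    source_mirror_url,
    destination_mirror_url,
])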
@@ -458,19 +457,26 @@ async def copy_pr_binaries(pr_number, pr_branch, shared_pr_mirror_url):
         pr_mirror_url
     ])
 
-def rq_has_reindex():
-    ltask_q = work_queue.get_lqueue()
-    for job in ltask_q.jobs:
-        if "update_mirror_index" in job.func_name:
-            return True
-    return False
-
+# Update index per stack mirror
 async def update_mirror_index(mirror_url):
     """Use spack buildcache command to update index on remote mirror"""
 
+    # Current job stack
+    job = get_current_job()
+    stack = job.meta["info"]["stack"]
+
+    # Check if another reindex for this stack is queued
+    do_reindex = True
+    ltask_q = work_queue.get_lqueue()
+    for job in ltask_q.jobs:
+        info = job.meta["info"]
+        if info["type"] == "reindex" and info["stack"] == stack:
+            do_reindex = False
+            break
+
     # Check the queue for more reindex jobs, if there are none,
     # run reindex on the graduated PR mirror.
-    if not rq_has_reindex():
+    if do_reindex:
         print(f"Updating binary index at {mirror_url}")
         await helpers.run_in_subprocess([
             "spack",
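The duplicate-reindex check above assumes every queued job carries a meta["info"] dict with a type and a stack. A minimal sketch of how such a job might be enqueued with RQ, assuming a hypothetical queue name and Redis connection (the project presumably goes through its work_queue helpers instead):

from redis import Redis
from rq import Queue

stack = "example-stack"                    # placeholder stack name
mirror_url = "s3://example-stack-mirror"   # placeholder mirror URL

q = Queue("ltask", connection=Redis())
q.enqueue(
    update_mirror_index,                   # the task defined above
    mirror_url,
    meta={"info": {"type": "reindex", "stack": stack}},  # read back by the queued-job check
)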
@@ -484,20 +490,22 @@ async def update_mirror_index(mirror_url):
 
 def hash_from_key(key):
     h = None
+    # 32 chars long
     if key.lower().endswith(("spec.json")):
         h = re.search(".*-([a-zA-Z0-9]+)\.spec\.json", key.lower())
     elif key.lower().endswith(("spack")):
         h = re.search(".*-([a-zA-Z0-9]+)\.spack", key.lower())
     return h
 
-async def prune_mirror_duplicates():
+# Prune per stack mirror
+async def prune_mirror_duplicates(pr_mirror_bucket, publish_mirror_bucket):
     s3 = boto3.resource("s3")
-    pr_bucket = s3.Bucket(helpers.pr_mirror_bucket)
-    publish_bucket = s3.Bucket(helpers.publish_mirror_bucket)
+    pr_bucket = s3.Bucket(pr_mirror_bucket)
+    publish_bucket = s3.Bucket(publish_mirror_bucket)
 
     # Get the hashes in the shared PR bucket
     pr_specs = {}
-    for obj in pr_bucket.filter(Prefix=helpers.pr_shared_bucket).objects():
+    for obj in pr_bucket.objects():
         if not obj.key.endswith(("spec.json")):
             continue
@@ -506,12 +514,12 @@ async def prune_mirror_duplicates():
     # Clean up anything that didn't work and log an error. This should never happen
     # but also shouldn't stop the pruning.
     if None in pr_specs:
-        logger.error("Encountered hashless spec.json in shared PR mirror")
+        logger.error("Encountered hashless spec.json in PR mirror")
         pr_specs.remove(None)
 
     # Check in the published base branch bucket for duplicates to delete
     delete_specs = {}
-    for obj in publish_bucket.filter(Prefix=helpers.pr_expected_base).objects():
+    for obj in publish_bucket.objects():
         if not obj.key.endswith(("spec.json")):
             continue
@@ -520,13 +528,12 @@ async def prune_mirror_duplicates():
         delete_specs.add(spec_hash)
 
     # Delete all of the objects with marked hashes
-    if delete_specs:
-        logger.info("Pruning PR mirror")
-        for obj in pr_bucket.filter(Prefix=helpers.pr_shared_bucket).objects():
+    for obj in pr_bucket.objects():
         if not obj.key.endswith((".spec.json", ".spack")):
             continue
 
         if hash_from_key(obj.key) in delete_specs:
-            logger.debug(f"""pr mirror pruning is deleteing {obj.key}
-                from s3://{helpers.pr_mirror_bucket}/{helpers.pr_shared_bucket}""")
+            logger.debug(
+                f"pr mirror pruning is deleting {obj.key} from s3://{pr_mirror_bucket}"
+            )
             obj.delete()
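For reference, stock boto3 exposes a Bucket's objects as a collection rather than a callable, so iteration normally goes through .objects.all() or .objects.filter(Prefix=...). A minimal sketch assuming plain boto3 resources and placeholder bucket/prefix names, in case pr_bucket.objects() above relies on a project-specific wrapper:

import boto3

s3 = boto3.resource("s3")
bucket = s3.Bucket("example-pr-mirror-bucket")   # placeholder name

# Every object in the bucket ...
for obj in bucket.objects.all():
    print(obj.key)

# ... or only those under a prefix, as the removed filter(Prefix=...) calls intended.
for obj in bucket.objects.filter(Prefix="shared_pr/"):
    print(obj.key)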