From fee33c28dfacb663dca8c3c12c9e77b08835220f Mon Sep 17 00:00:00 2001 From: PPPayson Date: Mon, 15 Sep 2025 13:42:04 -0400 Subject: [PATCH 01/16] Large Data Processing --- .gitignore | 4 +- ceiling_floor_estimate.py | 217 ++++-- ceiling_floor_estimate_large.py | 665 ++++++++++++++++++ .../imagenet_val_oscar_max_10.yaml | 46 ++ .../imagenet_val_oscar_max_15.yaml | 46 ++ .../imagenet_val_oscar_max_20.yaml | 46 ++ .../imagenet_val_oscar_max_5.yaml | 46 ++ configs/{ => co3d_configs}/co3d_train.yaml | 0 .../{ => co3d_configs}/co3d_train_oscar.yaml | 0 configs/{ => co3d_configs}/co3d_val.yaml | 0 .../co3d_val_auc_oscar.yaml} | 0 .../co3d_configs/co3d_val_spearman_oscar.yaml | 41 ++ .../co3d_constancy_val.yaml | 0 .../co3d_constancy_val_oscar.yaml | 0 .../constancy_ceiling_auc.yaml | 2 +- .../constancy_ceiling_emd.yaml | 45 ++ .../constancy_ceiling_rank_cosine.yaml | 45 ++ .../constancy_ceiling_rank_pearson.yaml | 45 ++ .../constancy_ceiling_spearman.yaml | 2 +- .../imagenet_val_oscar_max_10.yaml | 46 ++ .../imagenet_val_oscar_max_15.yaml | 46 ++ .../imagenet_val_oscar_max_20.yaml | 46 ++ .../imagenet_val_oscar_max_25.yaml | 46 ++ .../imagenet_val_oscar_max_30.yaml | 46 ++ .../exp_configs/imagenet_val_oscar_max_5.yaml | 46 ++ .../imagenet_co3d_val_oscar.yaml | 16 +- .../imagenet_train_oscar.yaml | 14 +- .../imagenet_val_oscar.yaml | 19 +- .../imagenet_val_spearman_oscar.yaml | 44 ++ .../jay_imagenet_for_co3d_train_0.1.yaml | 2 +- .../jay_imagenet_for_co3d_val_0.1.yaml | 2 +- configs/jay_imagenet_co3d_val_04_02_2025.yaml | 39 - configs/jay_imagenet_co3d_val_04_30_2025.yaml | 39 - configs/jay_imagenet_train_0.1.yaml | 36 - configs/jay_imagenet_train_02_19_2025.yaml | 37 - configs/jay_imagenet_train_04_02_2025.yaml | 43 -- configs/jay_imagenet_train_04_23_2025.yaml | 41 -- configs/jay_imagenet_train_04_30_2025.yaml | 41 -- configs/jay_imagenet_train_12_18_2024.yaml | 37 - configs/jay_imagenet_val_0.1.yaml | 37 - configs/jay_imagenet_val_02_19_2025.yaml | 37 - configs/jay_imagenet_val_04_02_2025.yaml | 39 - configs/jay_imagenet_val_04_23_2025.yaml | 37 - configs/jay_imagenet_val_04_30_2025.yaml | 37 - configs/jay_imagenet_val_12_18_2024.yaml | 36 - prepare_clickmaps.py | 3 + sample_clickmaps.py | 57 ++ scripts/process_imgnet.sh | 2 + scripts/run_exp.sh | 11 + src/utils.py | 104 +-- 50 files changed, 1670 insertions(+), 666 deletions(-) create mode 100644 ceiling_floor_estimate_large.py create mode 100644 configs/balance_exp_configs/imagenet_val_oscar_max_10.yaml create mode 100644 configs/balance_exp_configs/imagenet_val_oscar_max_15.yaml create mode 100644 configs/balance_exp_configs/imagenet_val_oscar_max_20.yaml create mode 100644 configs/balance_exp_configs/imagenet_val_oscar_max_5.yaml rename configs/{ => co3d_configs}/co3d_train.yaml (100%) rename configs/{ => co3d_configs}/co3d_train_oscar.yaml (100%) rename configs/{ => co3d_configs}/co3d_val.yaml (100%) rename configs/{co3d_val_oscar.yaml => co3d_configs/co3d_val_auc_oscar.yaml} (100%) create mode 100644 configs/co3d_configs/co3d_val_spearman_oscar.yaml rename configs/{ => co3d_constancy_configs}/co3d_constancy_val.yaml (100%) rename configs/{ => co3d_constancy_configs}/co3d_constancy_val_oscar.yaml (100%) rename configs/{ => co3d_constancy_configs}/constancy_ceiling_auc.yaml (97%) create mode 100644 configs/co3d_constancy_configs/constancy_ceiling_emd.yaml create mode 100644 configs/co3d_constancy_configs/constancy_ceiling_rank_cosine.yaml create mode 100644 configs/co3d_constancy_configs/constancy_ceiling_rank_pearson.yaml rename 
configs/{ => co3d_constancy_configs}/constancy_ceiling_spearman.yaml (97%) create mode 100644 configs/exp_configs/imagenet_val_oscar_max_10.yaml create mode 100644 configs/exp_configs/imagenet_val_oscar_max_15.yaml create mode 100644 configs/exp_configs/imagenet_val_oscar_max_20.yaml create mode 100644 configs/exp_configs/imagenet_val_oscar_max_25.yaml create mode 100644 configs/exp_configs/imagenet_val_oscar_max_30.yaml create mode 100644 configs/exp_configs/imagenet_val_oscar_max_5.yaml rename configs/{ => imgnet_configs}/imagenet_co3d_val_oscar.yaml (64%) rename configs/{ => imgnet_configs}/imagenet_train_oscar.yaml (68%) rename configs/{ => imgnet_configs}/imagenet_val_oscar.yaml (64%) create mode 100644 configs/imgnet_configs/imagenet_val_spearman_oscar.yaml rename configs/{ => imgnet_configs}/jay_imagenet_for_co3d_train_0.1.yaml (91%) rename configs/{ => imgnet_configs}/jay_imagenet_for_co3d_val_0.1.yaml (91%) delete mode 100644 configs/jay_imagenet_co3d_val_04_02_2025.yaml delete mode 100644 configs/jay_imagenet_co3d_val_04_30_2025.yaml delete mode 100644 configs/jay_imagenet_train_0.1.yaml delete mode 100644 configs/jay_imagenet_train_02_19_2025.yaml delete mode 100644 configs/jay_imagenet_train_04_02_2025.yaml delete mode 100644 configs/jay_imagenet_train_04_23_2025.yaml delete mode 100644 configs/jay_imagenet_train_04_30_2025.yaml delete mode 100644 configs/jay_imagenet_train_12_18_2024.yaml delete mode 100644 configs/jay_imagenet_val_0.1.yaml delete mode 100644 configs/jay_imagenet_val_02_19_2025.yaml delete mode 100644 configs/jay_imagenet_val_04_02_2025.yaml delete mode 100644 configs/jay_imagenet_val_04_23_2025.yaml delete mode 100644 configs/jay_imagenet_val_04_30_2025.yaml delete mode 100644 configs/jay_imagenet_val_12_18_2024.yaml create mode 100644 sample_clickmaps.py create mode 100644 scripts/process_imgnet.sh create mode 100644 scripts/run_exp.sh diff --git a/.gitignore b/.gitignore index d0aba0d..2637615 100644 --- a/.gitignore +++ b/.gitignore @@ -15,4 +15,6 @@ co3d_masks jay_work_in_progress/ jay_imagenet_train_04_30_2025_dimensions.npy jay_imagenet_train_0.1_dimensions.npy -*.png \ No newline at end of file +*.png +clickme_datasets/ +*.csv \ No newline at end of file diff --git a/ceiling_floor_estimate.py b/ceiling_floor_estimate.py index e8d0f1e..e5c4129 100644 --- a/ceiling_floor_estimate.py +++ b/ceiling_floor_estimate.py @@ -1,4 +1,5 @@ import os, sys +import random import numpy as np from PIL import Image import json @@ -11,11 +12,53 @@ import gc import torch from joblib import Parallel, delayed -from scipy.stats import spearmanr +from scipy.stats import spearmanr, pearsonr, rankdata, wasserstein_distance_nd +from scipy.spatial.distance import cosine # import resource # Add resource module for file descriptor limits from sklearn.metrics import average_precision_score from torchvision.transforms import functional as tvF +from torchvision.transforms import InterpolationMode +def emd_2d(test_map, ref_map): + test_map = (test_map - test_map.min()) / (test_map.max() - test_map.min()+1e-8) + ref_map = (ref_map - ref_map.min()) / (ref_map.max() - ref_map.min()+1e-8) + return wasserstein_distance_nd(test_map, ref_map) + +def rank_cosine(test_map, ref_map): + ref_map = ref_map.flatten() + test_map = test_map.flatten() + + non_zero_pos = np.where(ref_map != 0)[0] + + ref_rank = rankdata(ref_map, method='average') + test_rank = rankdata(test_map, method='average') + + ref_rank = ref_rank[non_zero_pos] + test_rank = test_rank[non_zero_pos] + ref_rank = 
np.float64(ref_rank) + test_rank = np.float64(test_rank) + + if test_rank.size > 1 and ref_rank.size > 1: + cosine_score = cosine(ref_rank, test_rank) + return cosine_score + else: + return float('nan') + +def rank_pearson(test_map, ref_map): + ref_map = ref_map.flatten() + test_map = test_map.flatten() + + non_zero_pos = np.where(ref_map != 0)[0] + ref_rank = rankdata(ref_map, method='average') + test_rank = rankdata(test_map, method='average') + + ref_rank = ref_rank[non_zero_pos] + test_rank = test_rank[non_zero_pos] + if test_rank.size > 1 and ref_rank.size > 1: + pearson_score = pearsonr(ref_rank, test_rank) + return pearson_score.statistic + else: + return float('nan') def auc(test_map, reference_map, thresholds=10, metric="iou"): """Compute the area under the IOU curve for a test map and a reference map""" @@ -48,7 +91,7 @@ def auc(test_map, reference_map, thresholds=10, metric="iou"): # Return the area under the curve (trapezoidal integration) # We're integrating over normalized threshold range [0,1] - return np.trapezoid(scores, x=thresholds) if len(thresholds) > 1 else np.mean(scores) + return np.trapz(scores, x=thresholds) if len(thresholds) > 1 else np.mean(scores) def rankorder(test_map, reference_map, threshold=0.): @@ -132,7 +175,7 @@ def compute_rotation_correlation_batch(batch_indices, all_data, all_names, metri target_depth_map = target_data['depth_map'] level_scores = [] for k, clickmap_at_k in enumerate(clickmaps): - if metric == "spearman" and k < (len(clickmaps)-1): + if metric != "auc" and k < (len(clickmaps)-1): continue rand_scores = [] n = len(clickmap_at_k) @@ -141,12 +184,15 @@ def compute_rotation_correlation_batch(batch_indices, all_data, all_names, metri for iteration in range(n_iterations): test_rand_perm = np.random.permutation(n) fh = test_rand_perm[:(n//2)] + fh = random.choices(fh, k=n) test_map = clickmap_at_k[fh].mean(0) if not floor and target_img_name == img_name: target_rand_perm = test_rand_perm else: target_rand_perm = np.random.permutation(target_n) sh = target_rand_perm[(target_n//2):] + sh = random.choices(sh, k=target_n) + reference_map = target_clickmap_at_k[sh].mean(0) # Save for visualization if k == (len(clickmaps)-1) and iteration == (n_iterations-1): @@ -159,7 +205,7 @@ def compute_rotation_correlation_batch(batch_indices, all_data, all_names, metri #Project before blurring if target_img_name != img_name: - test_map = utils.project_img_gpu(test_map, depth_map, w2c, target_w2c, Ks, target_Ks, device=device) + test_map = utils.project_img_gpu(test_map, depth_map, target_depth_map, w2c, target_w2c, Ks, target_Ks, device=device) blur_clickmaps = utils.blur_maps_for_cf( np.stack((test_map, reference_map), axis=0)[None], @@ -177,11 +223,11 @@ def compute_rotation_correlation_batch(batch_indices, all_data, all_names, metri image_shape = config['image_shape'] center_crop = config['center_crop'] if center_crop: - test_map = torch.tensor(test_map) - reference_map = torch.tensor(reference_map) - test_map = tvF.resize(test_map, min(image_shape)) + test_map = torch.tensor(test_map)[None, :, :] + reference_map = torch.tensor(reference_map)[None, :, :] + test_map = tvF.resize(test_map, min(image_shape), interpolation=InterpolationMode.NEAREST_EXACT) test_map = tvF.center_crop(test_map, center_crop) - reference_map = tvF.resize(reference_map, min(image_shape)) + reference_map = tvF.resize(reference_map, min(image_shape), interpolation=InterpolationMode.NEAREST_EXACT) reference_map = tvF.center_crop(reference_map, center_crop) test_map = 
test_map.numpy().squeeze() reference_map = reference_map.numpy().squeeze() @@ -192,10 +238,16 @@ def compute_rotation_correlation_batch(batch_indices, all_data, all_names, metri score = rankorder(test_map.flatten(), reference_map.flatten()) elif metric == "spearman": score, _ = spearmanr(test_map.flatten(), reference_map.flatten()) - if np.isnan(score): - continue + elif metric == "rank_pearson": + score = rank_pearson(test_map, reference_map) + elif metric == "rank_cosine": + score = rank_cosine(test_map, reference_map) + elif metric == "emd": + score = emd_2d(test_map, reference_map) else: raise ValueError(f"Invalid metric: {metric}") + if np.isnan(score): + continue rand_scores.append(score) # Explicitly free memory @@ -219,7 +271,7 @@ def compute_rotation_correlation_batch(batch_indices, all_data, all_names, metri projected_img = np.asarray(img) if target_img_name != img_name: projected_img = np.moveaxis(projected_img, -1, 0) - projected_img = utils.project_img_gpu(projected_img, depth_map, w2c, target_w2c, Ks, target_Ks, device=device) + projected_img = utils.project_img_gpu(projected_img, depth_map, target_depth_map, w2c, target_w2c, Ks, target_Ks, device=device) projected_img = np.moveaxis(projected_img, 0, -1) title = f"{img_name}_{round(angle_score, 3)}" plot_infos.append({'output_name':image_output_name, 'title':title, @@ -258,7 +310,7 @@ def compute_scale_correlation_batch(batch_indices, all_data, all_names, metric=" level_scores = [] scale_diff = scale / target_scale for k, clickmap_at_k in enumerate(clickmaps): - if metric == "spearman" and k < (len(clickmaps)-1): + if (metric != 'auc') and k < (len(clickmaps)-1): continue rand_scores = [] n = len(clickmap_at_k) @@ -267,6 +319,7 @@ def compute_scale_correlation_batch(batch_indices, all_data, all_names, metric=" for iteration in range(n_iterations): test_rand_perm = np.random.permutation(n) fh = test_rand_perm[:(n // 2)] + fh = random.choices(fh, k=n) # sh = test_rand_perm[(n//2):] test_map = clickmap_at_k[fh].mean(0) if not floor and target_img_name == img_name: @@ -274,6 +327,8 @@ def compute_scale_correlation_batch(batch_indices, all_data, all_names, metric=" else: target_rand_perm = np.random.permutation(target_n) sh = target_rand_perm[(target_n//2):] + sh = random.choices(sh, k=target_n) + reference_map = target_clickmap_at_k[sh].mean(0) if k == (len(clickmaps)-1) and iteration == (n_iterations - 1): unscaled_map = test_map.copy() @@ -285,11 +340,16 @@ def compute_scale_correlation_batch(batch_indices, all_data, all_names, metric=" # Scale before blurring, need to scale kernel size if scale_diff != 1: - test_map = utils.sparse_scale(test_map, scale_diff, device).cpu().numpy() + test_map = utils.sparse_scale(test_map, scale_diff, device).cpu().numpy().squeeze() + # if scale_diff > 1: + # reference_map = torch.tensor(reference_map) + # reference_map = tvF.center_crop(reference_map, test_map.shape) + # reference_map = reference_map.numpy() + blur_clickmaps = utils.blur_maps_for_cf( np.stack((test_map, reference_map), axis=0)[None], - int(blur_size/scale_diff), - int(blur_sigma/scale_diff), + int(blur_size), + int(blur_sigma), gpu_batch_size=2).squeeze() test_map = blur_clickmaps[0] @@ -300,12 +360,12 @@ def compute_scale_correlation_batch(batch_indices, all_data, all_names, metric=" if config: image_shape = config['image_shape'] center_crop = config['center_crop'] - if center_crop: - test_map = torch.tensor(test_map) - reference_map = torch.tensor(reference_map) - test_map = tvF.resize(test_map, min(image_shape)) + if 
center_crop and test_map.shape[-1] > center_crop[-1]: + test_map = torch.tensor(test_map)[None] + reference_map = torch.tensor(reference_map)[None] + test_map = tvF.resize(test_map, min(image_shape), interpolation=InterpolationMode.NEAREST_EXACT) test_map = tvF.center_crop(test_map, center_crop) - reference_map = tvF.resize(reference_map, min(image_shape)) + reference_map = tvF.resize(reference_map, min(image_shape), interpolation=InterpolationMode.NEAREST_EXACT) reference_map = tvF.center_crop(reference_map, center_crop) test_map = test_map.numpy().squeeze() reference_map = reference_map.numpy().squeeze() @@ -318,6 +378,12 @@ def compute_scale_correlation_batch(batch_indices, all_data, all_names, metric=" score = rankorder(test_map.flatten(), reference_map.flatten()) elif metric == "spearman": score, _ = spearmanr(test_map.flatten(), reference_map.flatten()) + elif metric == "rank_pearson": + score = rank_pearson(test_map, reference_map) + elif metric == "rank_cosine": + score = rank_cosine(test_map, reference_map) + elif metric == "emd": + score = emd_2d(test_map, reference_map) else: raise ValueError(f"Invalid metric: {metric}") rand_scores.append(score) @@ -348,59 +414,88 @@ def compute_scale_correlation_batch(batch_indices, all_data, all_names, metric=" 'reference_map':reference_map, 'test_map': test_map}) return batch_results, scale_results, plot_infos -def compute_correlation_batch(batch_indices, all_clickmaps, all_names, metric="auc", n_iterations=10, device='cuda', blur_size=11, blur_sigma=1.5, floor=False, config=None): +def compute_correlation_batch(batch_indices, all_clickmaps, all_names, metric="auc", n_iterations=10, device='cuda', blur_size=11, blur_sigma=1.5, floor=False, config=None, metadata=None): """Compute split-half correlations for a batch of clickmaps in parallel""" batch_results = [] all_scores = {} + max_kernel_size = config.get("max_kernel_size", 51) + blur_sigma_function = config.get("blur_sigma_function", lambda x: x) for i in tqdm(batch_indices, desc="Computing split-half correlations", total=len(batch_indices)): clickmaps = all_clickmaps[i] img_name = all_names[i] level_corrs = [] + #TODO modify for speed up + if metadata and img_name in metadata: + native_size = metadata[img_name] + short_side = min(native_size) + scale = short_side / min(clickmaps[-1].shape[-2:]) + adj_blur_size = int(np.round(blur_size * scale)) + if not adj_blur_size % 2: + adj_blur_size += 1 + adj_blur_size = min(adj_blur_size, max_kernel_size) + adj_blur_sigma = blur_sigma_function(adj_blur_size) + else: + adj_blur_size = blur_size + adj_blur_sigma = blur_sigma + if floor: - rand_i = np.random.choice([j for j in range(len(all_clickmaps)) if j != i]) - for k, clickmap_at_k in enumerate(clickmaps): + rand_i = np.random.randint(len(all_clickmaps) - 1) + if rand_i >= i: + rand_i += 1 + rand_clickmaps = all_clickmaps[rand_i] + rand_name = all_names[rand_i] + if metadata and rand_name in metadata: + native_size = metadata[rand_name] + short_side = min(native_size) + scale = short_side / min(rand_clickmaps[-1].shape[-2:]) + rand_adj_blur_size = int(np.round(blur_size * scale)) + if not adj_blur_size % 2: + rand_adj_blur_size += 1 + rand_adj_blur_size = min(rand_adj_blur_size, max_kernel_size) + rand_adj_blur_sigma = blur_sigma_function(rand_adj_blur_size) + else: + rand_adj_blur_size = blur_size + rand_adj_blur_sigma = blur_sigma + + for k , clickmap_at_k, in enumerate(clickmaps): + if metric != "auc" and k < (len(clickmaps)-1): + continue rand_corrs = [] n = len(clickmap_at_k) + if floor: + 
rand_clickmap_at_k = rand_clickmaps[k] + rand_n = len(rand_clickmap_at_k) for _ in range(n_iterations): rand_perm = np.random.permutation(n) fh = rand_perm[:(n // 2)] - sh = rand_perm[(n // 2):] - + # Add bootstrapping to max fh/sh size to original img + fh = random.choices(fh, k=n) # Create the test and reference maps test_map = clickmap_at_k[fh].mean(0) if floor: - rand_perm = np.random.permutation(len(all_clickmaps[rand_i][k])) - sh = rand_perm[(n // 2):] - reference_map = all_clickmaps[rand_i][k][sh].mean(0) # Take maps from the same level in a random other image - #TODO Add adjusted blur size for imagenet images, add resize and center crop - # Ensure reference_map has the same shape as test_map - if reference_map.shape != test_map.shape: - # Resize reference_map to match test_map's shape - reference_map_resized = np.zeros(test_map.shape, dtype=reference_map.dtype) - # Copy the smaller of the dimensions for each axis - min_height = min(reference_map.shape[0], test_map.shape[0]) - min_width = min(reference_map.shape[1], test_map.shape[1]) - reference_map_resized[:min_height, :min_width] = reference_map[:min_height, :min_width] - reference_map = reference_map_resized - + rand_perm = np.random.permutation(rand_n) + sh = rand_perm[(rand_n // 2):] + sh = random.choices(sh, k=rand_n) + reference_map = rand_clickmap_at_k[sh].mean(0) # Take maps from the same level in a random other image reference_map = utils.blur_maps_for_cf( reference_map[None, None], - blur_size, - blur_sigma, + rand_adj_blur_size, + rand_adj_blur_sigma, gpu_batch_size=1).squeeze() test_map = utils.blur_maps_for_cf( test_map[None, None], - blur_size, - blur_sigma, + adj_blur_size, + adj_blur_sigma, gpu_batch_size=1).squeeze() else: + sh = rand_perm[(n // 2):] + sh = random.choices(sh, k=n) reference_map = clickmap_at_k[sh].mean(0) - # Make maps for each blur_clickmaps = utils.blur_maps_for_cf( np.stack((test_map, reference_map), axis=0)[None], - blur_size, - blur_sigma, + adj_blur_size, + adj_blur_sigma, gpu_batch_size=2).squeeze() test_map = blur_clickmaps[0] reference_map = blur_clickmaps[1] @@ -408,11 +503,11 @@ def compute_correlation_batch(batch_indices, all_clickmaps, all_names, metric="a image_shape = config['image_shape'] center_crop = config['center_crop'] if center_crop: - test_map = torch.tensor(test_map) - reference_map = torch.tensor(reference_map) - test_map = tvF.resize(test_map, min(image_shape)) + test_map = torch.tensor(test_map)[None] + reference_map = torch.tensor(reference_map)[None] + test_map = tvF.resize(test_map, min(image_shape), interpolation=InterpolationMode.NEAREST_EXACT) test_map = tvF.center_crop(test_map, center_crop) - reference_map = tvF.resize(reference_map, min(image_shape)) + reference_map = tvF.resize(reference_map, min(image_shape), interpolation=InterpolationMode.NEAREST_EXACT) reference_map = tvF.center_crop(reference_map, center_crop) test_map = test_map.numpy().squeeze() reference_map = reference_map.numpy().squeeze() @@ -423,6 +518,12 @@ def compute_correlation_batch(batch_indices, all_clickmaps, all_names, metric="a score = rankorder(test_map.flatten(), reference_map.flatten()) elif metric == "spearman": score, _ = spearmanr(test_map.flatten(), reference_map.flatten()) + elif metric == "rank_pearson": + score = rank_pearson(test_map, reference_map) + elif metric == "rank_cosine": + score = rank_cosine(test_map, reference_map) + elif metric == "emd": + score = emd_2d(test_map, reference_map) else: raise ValueError(f"Invalid metric: {metric}") rand_corrs.append(score) @@ -431,7 
+532,7 @@ def compute_correlation_batch(batch_indices, all_clickmaps, all_names, metric="a if 'blur_clickmaps' in locals(): del blur_clickmaps - rand_corrs = np.asarray(rand_corrs).mean() # Take the mean of the random correlations + rand_corrs = np.nanmean(np.asarray(rand_corrs)) # Take the mean of the random correlations level_corrs.append(rand_corrs) # Free memory gc.collect() @@ -495,6 +596,9 @@ def compute_correlation_batch(batch_indices, all_clickmaps, all_names, metric="a if "constancy" not in config: config["constancy"] = False + + if "max_subjects" not in config: + config["max_subjects"] = float('inf') if args.metric is not None: config["metric"] = args.metric @@ -560,7 +664,7 @@ def compute_correlation_batch(batch_indices, all_clickmaps, all_names, metric="a os.makedirs(click_counts_dir, exist_ok=True) # Original code for non-HDF5 format - hdf5_path = os.path.join(output_dir, f"{config['experiment_name']}.h5") + hdf5_path = os.path.join(output_dir, f"{config['experiment_name']}_ceiling_metadata.h5") print(f"Saving results to file: {hdf5_path}") with h5py.File(hdf5_path, 'w') as f: f.create_group("clickmaps") @@ -719,6 +823,7 @@ def compute_correlation_batch(batch_indices, all_clickmaps, all_names, metric="a blur_sigma=config.get("blur_sigma", config["blur_size"]), floor=False, config=config, + metadata=metadata, ) for batch in tqdm(batches, desc="Computing ceiling batches", total=len(batches)) ) ceiling_results, all_ceilings = zip(*ceiling_returns) @@ -736,6 +841,7 @@ def compute_correlation_batch(batch_indices, all_clickmaps, all_names, metric="a blur_sigma=config.get("blur_sigma", config["blur_size"]), floor=True, config=config, + metadata=metadata, ) for batch in tqdm(batches, desc="Computing floor batches", total=len(batches)) ) floor_results, all_floors = zip(*floor_returns) @@ -794,13 +900,6 @@ def compute_correlation_batch(batch_indices, all_clickmaps, all_names, metric="a clickmap = all_clickmaps[i] img_idx = int(name.split('.')[0].split('_')[-1]) zoom_level = img_idx % 3 - # if zoom_level != 2: - # target_img_idx = img_idx+(2-zoom_level) - # else: - # target_img_idx = img_idx - # target_img_name = f"{'_'.join(name.split('.')[0].split('_')[:-1])}_{str(target_img_idx).zfill(5)}.png" - # if target_img_name not in all_names: - # continue depth_path = os.path.join(depth_root, f"depth_{name.replace('.png', '.npy')}") depth_map = np.load(depth_path) all_data[name] = {"clickmap": clickmap, "scale":scales_dict[name], "zoom":zoom_level, 'img_idx':img_idx, diff --git a/ceiling_floor_estimate_large.py b/ceiling_floor_estimate_large.py new file mode 100644 index 0000000..58def14 --- /dev/null +++ b/ceiling_floor_estimate_large.py @@ -0,0 +1,665 @@ +import os, sys +import random +import numpy as np +from PIL import Image +import json +import pandas as pd +import argparse +from matplotlib import pyplot as plt +from src import utils +from tqdm import tqdm +import h5py +import gc +import torch +from joblib import Parallel, delayed +from scipy.stats import spearmanr, pearsonr, rankdata, wasserstein_distance_nd +from scipy.spatial.distance import cosine +# import resource # Add resource module for file descriptor limits +from sklearn.metrics import average_precision_score +from torchvision.transforms import functional as tvF +from torchvision.transforms import InterpolationMode + +def emd_2d(test_map, ref_map): + test_map = (test_map - test_map.min()) / (test_map.max() - test_map.min()+1e-8) + ref_map = (ref_map - ref_map.min()) / (ref_map.max() - ref_map.min()+1e-8) + return 
wasserstein_distance_nd(test_map, ref_map) + +def rank_cosine(test_map, ref_map): + ref_map = ref_map.flatten() + test_map = test_map.flatten() + + non_zero_pos = np.where(ref_map != 0)[0] + + ref_rank = rankdata(ref_map, method='average') + test_rank = rankdata(test_map, method='average') + + ref_rank = ref_rank[non_zero_pos] + test_rank = test_rank[non_zero_pos] + ref_rank = np.float64(ref_rank) + test_rank = np.float64(test_rank) + + if test_rank.size > 1 and ref_rank.size > 1: + cosine_score = cosine(ref_rank, test_rank) + return cosine_score + else: + return float('nan') + +def rank_pearson(test_map, ref_map): + ref_map = ref_map.flatten() + test_map = test_map.flatten() + + non_zero_pos = np.where(ref_map != 0)[0] + ref_rank = rankdata(ref_map, method='average') + test_rank = rankdata(test_map, method='average') + + ref_rank = ref_rank[non_zero_pos] + test_rank = test_rank[non_zero_pos] + if test_rank.size > 1 and ref_rank.size > 1: + pearson_score = pearsonr(ref_rank, test_rank) + return pearson_score.statistic + else: + return float('nan') + +def auc(test_map, reference_map, thresholds=10, metric="iou"): + """Compute the area under the IOU curve for a test map and a reference map""" + scores = [] + + # Normalize each map to [0,1] + test_map = (test_map - test_map.min()) / (test_map.max() - test_map.min()+1e-8) + reference_map = (reference_map - reference_map.min()) / (reference_map.max() - reference_map.min()+1e-8) + + # Create evenly spaced thresholds from 0 to 1 + # if thresholds == 1: + # thresholds = [0] + # else: + # thresholds = np.linspace(0, 1, thresholds) + thresholds = np.arange(0.05, 1., 0.05) + + # Calculate IOU at each threshold pair + for threshold in thresholds: + ref_binary = reference_map > threshold + if metric.lower() == "map": + score = average_precision_score(ref_binary, test_map) + elif metric.lower() == "iou": + test_binary = test_map > threshold + intersection = np.sum(np.logical_and(test_binary, ref_binary)) + union = np.sum(np.logical_or(test_binary, ref_binary)) + score = intersection / union if union > 0 else 0.0 + else: + raise ValueError(f"Invalid metric: {metric}") + scores.append(score) + + # Return the area under the curve (trapezoidal integration) + # We're integrating over normalized threshold range [0,1] + return np.trapz(scores, x=thresholds) if len(thresholds) > 1 else np.mean(scores) + + +def rankorder(test_map, reference_map, threshold=0.): + """ + 1. Rank order the test map. + 2. Binarize the reference map to get a mask of locations that we look at + 3. 
Average test map ranks within the reference map + + Parameters: + ----------- + test_map : numpy.ndarray + The test map to be rank ordered + reference_map : numpy.ndarray + The reference map to be binarized + threshold : float, optional + Threshold to binarize the reference map, default is 0.5 + + Returns: + -------- + float + The average rank of test map values within the reference map mask + """ + # Normalize the reference map + reference_map = reference_map / reference_map.max() + + # Binarize the reference map to create a mask + mask = reference_map > threshold + + # Get flat indices of non-zero elements in mask + mask_indices = np.where(mask.flatten())[0] + + if mask_indices.size == 0: + return 0.0 # Return 0 if no pixels are in the mask + + # Get the flattened test map + flat_test_map = test_map.flatten() + + # Rank order the test map (higher values get higher ranks) + # First argsort finds positions in sorted order + # Second argsort converts those positions to ranks + # We use flat_test_map directly (not negated) to make higher values = higher ranks + ranks = np.argsort(np.argsort(flat_test_map)) + + # Normalize ranks to [0, 1] where 1 represents the highest value + normalized_ranks = ranks / (len(ranks) - 1) if len(ranks) > 1 else ranks + + # Calculate mean rank within mask + mean_rank = normalized_ranks[mask_indices].mean() + + return mean_rank + + +def compute_correlation_batch(batch_indices, all_clickmaps, all_names, metric="auc", n_iterations=10, device='cuda', blur_size=11, blur_sigma=1.5, floor=False, config=None, metadata=None): + """Compute split-half correlations for a batch of clickmaps in parallel""" + batch_results = [] + all_scores = {} + max_kernel_size = config.get("max_kernel_size", 51) + blur_sigma_function = config.get("blur_sigma_function", lambda x: x) + for i in tqdm(batch_indices, desc="Computing split-half correlations", total=len(batch_indices)): + hd5_name = f"clickmap_{str(i).zfill(8)}" + clickmaps = all_clickmaps[hd5_name] + img_name = all_names[i] + level_corrs = [] + if metadata and img_name in metadata: + native_size = metadata[img_name] + short_side = min(native_size) + scale = short_side / min(clickmaps[-1].shape[-2:]) + adj_blur_size = int(np.round(blur_size * scale)) + if not adj_blur_size % 2: + adj_blur_size += 1 + adj_blur_size = min(adj_blur_size, max_kernel_size) + adj_blur_sigma = blur_sigma_function(adj_blur_size) + else: + adj_blur_size = blur_size + adj_blur_sigma = blur_sigma + if metadata: + print("Missing in Metadata", img_name) + if floor: + rand_i = np.random.randint(len(all_clickmaps) - 1) + if rand_i >= i: + rand_i += 1 + rand_clickmaps = all_clickmaps[rand_i] + rand_name = all_names[rand_i] + rand_hd5_name = f"clickmap_{str(rand_i).zfill(8)}" + random_map = all_clickmaps[rand_hd5_name] + if metadata and rand_name in metadata: + native_size = metadata[rand_name] + short_side = min(native_size) + scale = short_side / min(random_map[-1].shape[-2:]) + rand_adj_blur_size = int(np.round(blur_size * scale)) + if not adj_blur_size % 2: + rand_adj_blur_size += 1 + rand_adj_blur_size = min(rand_adj_blur_size, max_kernel_size) + rand_adj_blur_sigma = blur_sigma_function(rand_adj_blur_size) + else: + rand_adj_blur_size = blur_size + rand_adj_blur_sigma = blur_sigma + if metadata: + print("Missing in Metadata", rand_name) + + for k, clickmap_at_k in enumerate(clickmaps): + if metric != "auc" and k < (len(clickmaps)-1): + continue + rand_corrs = [] + n = len(clickmap_at_k) + if floor: + rand_clickmap_at_k = random_map[k] + rand_n = 
len(rand_clickmap_at_k) + for _ in range(n_iterations): + rand_perm = np.random.permutation(n) + fh = rand_perm[:(n // 2)] + # Add bootstrapping to max fh/sh size to original img + fh = random.choices(fh, k=n) + # Create the test and reference maps + test_map = clickmap_at_k[fh].mean(0) + if floor: + rand_perm = np.random.permutation(rand_n) + sh = rand_perm[(rand_n // 2):] + sh = random.choices(sh, k=rand_n) + reference_map = rand_clickmap_at_k[sh].mean(0) # Take maps from the same level in a random other image + reference_map = utils.blur_maps_for_cf( + reference_map[None, None], + rand_adj_blur_size, + rand_adj_blur_sigma, + gpu_batch_size=1).squeeze() + test_map = utils.blur_maps_for_cf( + test_map[None, None], + adj_blur_size, + adj_blur_sigma, + gpu_batch_size=1).squeeze() + else: + sh = rand_perm[(n // 2):] + sh = random.choices(sh, k=n) + reference_map = clickmap_at_k[sh].mean(0) + + # Make maps for each + blur_clickmaps = utils.blur_maps_for_cf( + np.stack((test_map, reference_map), axis=0)[None], + adj_blur_size, + adj_blur_sigma, + gpu_batch_size=2).squeeze() + test_map = blur_clickmaps[0] + reference_map = blur_clickmaps[1] + if config: + image_shape = config['image_shape'] + center_crop = config['center_crop'] + if center_crop: + test_map = torch.tensor(test_map)[None, :, :] + reference_map = torch.tensor(reference_map)[None, :, :] + test_map = tvF.resize(test_map, min(image_shape), interpolation=InterpolationMode.NEAREST_EXACT) + test_map = tvF.center_crop(test_map, center_crop) + reference_map = tvF.resize(reference_map, min(image_shape), interpolation=InterpolationMode.NEAREST_EXACT) + reference_map = tvF.center_crop(reference_map, center_crop) + test_map = test_map.numpy().squeeze() + reference_map = reference_map.numpy().squeeze() + # Use scipy's spearman correlation + if metric == "auc": + score = auc(test_map.flatten(), reference_map.flatten()) + elif metric == "rankorder": + score = rankorder(test_map.flatten(), reference_map.flatten()) + elif metric == "spearman": + score, _ = spearmanr(test_map.flatten(), reference_map.flatten()) + elif metric == "rank_pearson": + score = rank_pearson(test_map, reference_map) + elif metric == "rank_cosine": + score = rank_cosine(test_map, reference_map) + elif metric == "emd": + score = emd_2d(test_map, reference_map) + else: + raise ValueError(f"Invalid metric: {metric}") + rand_corrs.append(score) + + # Explicitly free memory + if 'blur_clickmaps' in locals(): + del blur_clickmaps + + rand_corrs = np.nanmean(np.asarray(rand_corrs)) # Take the mean of the random correlations + level_corrs.append(rand_corrs) + # Free memory + gc.collect() + batch_results.append(np.asarray(level_corrs).mean()) # Integrate over the levels + all_scores[img_name] = batch_results[-1] + return batch_results, all_scores + + +if __name__ == "__main__": + # Add command line arguments + parser = argparse.ArgumentParser(description="Process clickme data for modeling") + parser.add_argument('config', nargs='?', help='Path to config file') + parser.add_argument('--debug', action='store_true', help='Enable additional debug output') + parser.add_argument('--verbose', action='store_true', help='Show detailed progress for GPU processing') + parser.add_argument('--gpu-batch-size', type=int, default=None, help='Override GPU batch size') + parser.add_argument('--max-workers', type=int, default=None, help='Maximum number of CPU workers') + parser.add_argument('--profile', action='store_true', help='Enable performance profiling') + 
parser.add_argument('--filter-duplicates', action='store_false', help='Filter duplicate participant submissions, keeping only the first submission per image') + parser.add_argument('--max-open-files', type=int, default=4096, help='Maximum number of open files allowed') + parser.add_argument('--correlation-batch-size', type=int, default=None, help='Override correlation batch size') + parser.add_argument('--correlation-jobs', type=int, default=None, help='Override number of parallel jobs for correlation') + parser.add_argument('--metric', type=str, default=None, help='Metric to use for correlation') + parser.add_argument('--time_based_bins', action='store_true', help='Enable time based bin threshold instead of count based') + parser.add_argument('--save_json', default=False, action='store_true') + args = parser.parse_args() + + # Increase file descriptor limit + # try: + # soft, hard = resource.getrlimit(resource.RLIMIT_NOFILE) + # print(f"Current file descriptor limits: soft={soft}, hard={hard}") + # new_soft = min(args.max_open_files, hard) + # resource.setrlimit(resource.RLIMIT_NOFILE, (new_soft, hard)) + # print(f"Increased file descriptor soft limit to {new_soft}") + # except (ValueError, resource.error) as e: + # print(f"Warning: Could not increase file descriptor limit: {e}") + + # Start profiling if requested + if args.profile: + import cProfile + profiler = cProfile.Profile() + profiler.enable() + + # Load config file + if args.config: + config_file = args.config if "configs" + os.path.sep in args.config else os.path.join("configs", args.config) + assert os.path.exists(config_file), f"Cannot find config file: {config_file}" + config = utils.process_config(config_file) + else: + config_file = utils.get_config(sys.argv) + config = utils.process_config(config_file) + if "max_subjects" not in config: + config["max_subjects"] = float('inf') + # Add filter_duplicates to config if not present + if "filter_duplicates" not in config: + config["filter_duplicates"] = args.filter_duplicates + if "save_json" not in config: + config["save_json"] = args.save_json + + # Add time_based_bins to config if not present + if "time_based_bins" not in config: + config["time_based_bins"] = args.time_based_bins + + if "constancy" not in config: + config["constancy"] = False + + if args.metric is not None: + config["metric"] = args.metric + print(f"Overwriting metric to {args.metric}") + + # Load clickme data + print(f"Loading clickme data...") + clickme_data = utils.process_clickme_data( + config["clickme_data"], + config["filter_mobile"]) + total_maps = len(clickme_data) + + # Apply duplicate filtering if requested + if config["filter_duplicates"] or args.filter_duplicates: + clickme_data = utils.filter_duplicate_participants(clickme_data) + total_maps = len(clickme_data) + + # Validate clickme data structure + print(f"Validating clickme data structure for {total_maps} maps...") + image_paths = clickme_data['image_path'].unique() + total_unique_images = len(image_paths) + print(f"Found {total_unique_images} unique images") + + # Set up GPU configuration + if args.gpu_batch_size: + config["gpu_batch_size"] = args.gpu_batch_size + else: + config["gpu_batch_size"] = 4096 + + # Optimize number of workers based on CPU count + cpu_count = os.cpu_count() + if args.max_workers: + config["n_jobs"] = min(args.max_workers, cpu_count) + else: + # Leave some cores free for system operations + config["n_jobs"] = max(1, min(cpu_count - 1, 8)) + + # Verify GPU is available + config["use_gpu_blurring"] = 
torch.cuda.is_available() + if config["use_gpu_blurring"]: + # Print GPU info + gpu_name = torch.cuda.get_device_name(0) + gpu_memory = torch.cuda.get_device_properties(0).total_memory / (1024**3) # GB + print(f"Using GPU: {gpu_name} with {gpu_memory:.2f} GB memory") + else: + print("GPU not available, exiting.") + sys.exit(1) + + # Set up output format + if "output_format" not in config or config["output_format"] == "auto": + config["output_format"] = "hdf5" if total_maps > 100000 else "numpy" + output_format = config["output_format"] + + # Ensure all directories exist + output_dir = config["assets"] + image_output_dir = config["example_image_output_dir"] + temp_dir = config["temp_dir"] + + os.makedirs(temp_dir, exist_ok=True) + os.makedirs(output_dir, exist_ok=True) + os.makedirs(image_output_dir, exist_ok=True) + os.makedirs(os.path.join(output_dir, config["experiment_name"]), exist_ok=True) + # Create dedicated directory for click counts + click_counts_dir = os.path.join(output_dir, f"{config['experiment_name']}_click_counts") + os.makedirs(click_counts_dir, exist_ok=True) + + # Original code for non-HDF5 format + hdf5_path = os.path.join(output_dir, f"{config['experiment_name']}.h5") + print(f"Saving results to file: {hdf5_path}") + with h5py.File(hdf5_path, 'w') as f: + f.create_group("clickmaps") + f.create_group("click_counts") # Add group for click counts + meta_grp = f.create_group("metadata") + meta_grp.attrs["total_unique_images"] = total_unique_images + meta_grp.attrs["total_maps"] = total_maps + meta_grp.attrs["filter_duplicates"] = np.bytes_("True" if config["filter_duplicates"] else "False") + meta_grp.attrs["creation_date"] = np.bytes_(pd.Timestamp.now().strftime("%Y-%m-%d %H:%M:%S")) + + # Print optimization settings + print("\nProcessing settings:") + print(f"- Dataset size: {total_maps} maps, {total_unique_images} images") + print(f"- GPU batch size: {config['gpu_batch_size']}") + print(f"- CPU workers: {config['n_jobs']}") + print(f"- Output format: {config['output_format']}") + print(f"- Filter duplicates: {config['filter_duplicates']}") + print(f"- Memory usage at start: {utils.get_memory_usage():.2f} MB\n") + + # Choose processing method (compiled Cython vs. Python) + use_cython = config.get("use_cython", True) + if use_cython: + try: + from src import cython_utils + create_clickmap_func = cython_utils.create_clickmap_fast + fast_duplicate_detection = cython_utils.fast_duplicate_detection + fast_ious_binary = cython_utils.fast_ious_binary + print("Using Cython-optimized functions") + except (ImportError, ModuleNotFoundError) as e: + use_cython = False + from src import python_utils + create_clickmap_func = python_utils.create_clickmap_fast + fast_duplicate_detection = python_utils.fast_duplicate_detection + fast_ious_binary = python_utils.fast_ious_binary + print(f"Cython modules not available: {e}") + print("Falling back to Python implementation. 
For best performance, run 'python compile_cython.py build_ext --inplace' first.") + else: + from src import python_utils + create_clickmap_func = python_utils.create_clickmap_fast + fast_duplicate_detection = python_utils.fast_duplicate_detection + fast_ious_binary = python_utils.fast_ious_binary + + # Load metadata + if config["metadata_file"]: + metadata = np.load(config["metadata_file"], allow_pickle=True).item() + else: + metadata = None + + print("Processing clickme data...") + # Always use parallel processing for large datasets + clickmaps, ccounts = utils.process_clickmap_files_parallel( + clickme_data=clickme_data, + image_path=config["image_path"], + file_inclusion_filter=config["file_inclusion_filter"], + file_exclusion_filter=config["file_exclusion_filter"], + min_clicks=config["min_clicks"], + max_clicks=config["max_clicks"], + n_jobs=config["n_jobs"]) + + # Apply filters if necessary + if config["class_filter_file"]: + print("Filtering classes...") + clickmaps = utils.filter_classes( + clickmaps=clickmaps, + class_filter_file=config["class_filter_file"]) + + if config["participant_filter"]: + print("Filtering participants...") + clickmaps = utils.filter_participants(clickmaps) + + # Process all maps with our new single-batch GPU function + print(f"Processing with GPU (batch size: {config['gpu_batch_size']})...") + final_clickmaps, all_clickmaps, categories, final_keep_index, click_counts, clickmap_bins = utils.process_all_maps_multi_thresh_gpu( + clickmaps=clickmaps, + config=config, + metadata=metadata, + create_clickmap_func=create_clickmap_func, + fast_duplicate_detection=fast_duplicate_detection, + return_before_blur=True, + average_maps=False, + time_based_bins=config['time_based_bins'], + save_to_disk=True, + maximum_length=5000, + ) + # Apply mask filtering if needed\ + # Not modified since it's not used for imagenet clickmaps + if final_keep_index and config["mask_dir"]: + print("Applying mask filtering...") + masks = utils.load_masks(config["mask_dir"]) + final_clickmaps, all_clickmaps, categories, final_keep_index = utils.filter_for_foreground_masks( + final_clickmaps=final_clickmaps, + all_clickmaps=all_clickmaps, + categories=categories, + masks=masks, + mask_threshold=config["mask_threshold"] + ) + # Update click counts to match filtered images + click_counts = {k: click_counts[k] for k in final_keep_index if k in click_counts} + + # Convert all_clickmaps to the format expected by the correlation code + image_shape = config["image_shape"] + correlation_batch_size = config["correlation_batch_size"] + null_iterations = config["null_iterations"] + metric = config["metric"] + n_jobs = config["n_jobs"] + gpu_batch_size = config["gpu_batch_size"] + + # Override configuration with command-line arguments if provided + if args.correlation_batch_size: + correlation_batch_size = args.correlation_batch_size + print(f"Overriding correlation batch size: {correlation_batch_size}") + else: + # Increase default batch size to speed up processing + correlation_batch_size = max(correlation_batch_size, 16) + + if args.correlation_jobs: + n_jobs = args.correlation_jobs + print(f"Overriding correlation jobs: {n_jobs}") + else: + # Increase default number of jobs + n_jobs = max(n_jobs, min(16, os.cpu_count())) + + # Check if GPU is available + if torch.cuda.is_available(): + device = 'cuda' + print(f"GPU detected: {torch.cuda.get_device_name(0)}") + print(f"Setting batch size to {gpu_batch_size} for GPU operations") + else: + device = 'cpu' + print("No GPU detected, using CPU for 
processing") + gpu_batch_size = 16 # Smaller batch size for CPU + print(f"Converting clickmaps for correlation analysis...") + + # Compute scores through split-halfs + # Optimize by processing in batches for better parallelization + print(f"Computing split-half correlations in parallel (n_jobs={n_jobs}, batch_size={correlation_batch_size})...") + temp_file = h5py.File(temp_dir, 'r') + temp_group = temp_file['clickmaps'] + all_clickmaps=temp_group + num_clickmaps = len(temp_group) + # Prepare batches for correlation computation + indices = list(range(num_clickmaps)) + batches = [indices[i:i+correlation_batch_size] for i in range(0, len(indices), correlation_batch_size)] + + # # Reduce the number of jobs if there are many batches to prevent too many files open + # adjusted_n_jobs = min(n_jobs, max(1, 20 // len(batches) + 1)) + # if adjusted_n_jobs < n_jobs: + # print(f"Reducing parallel jobs from {n_jobs} to {adjusted_n_jobs} to prevent 'too many files open' error") + # n_jobs = adjusted_n_jobs + + # Process correlation batches in parallel + ceiling_returns = Parallel(n_jobs=n_jobs, prefer="threads")( + delayed(compute_correlation_batch)( + batch_indices=batch, + all_clickmaps=all_clickmaps, + all_names=final_keep_index, + metric=metric, + n_iterations=null_iterations, + device=device, + blur_size=config["blur_size"], + blur_sigma=config.get("blur_sigma", config["blur_size"]), + floor=False, + config=config, + metadata=metadata, + ) for batch in tqdm(batches, desc="Computing ceiling batches", total=len(batches)) + ) + ceiling_results, all_ceilings = zip(*ceiling_returns) + # Force garbage collection between major operations + gc.collect() + + # floor_returns = Parallel(n_jobs=n_jobs, prefer="threads")( + # delayed(compute_correlation_batch)( + # batch_indices=batch, + # all_clickmaps=all_clickmaps, + # all_names=final_keep_index, + # metric=metric, + # n_iterations=null_iterations, + # device=device, + # blur_size=config["blur_size"], + # blur_sigma=config.get("blur_sigma", config["blur_size"]), + # floor=True, + # config=config, + # metadata=metadata, + # ) for batch in tqdm(batches, desc="Computing floor batches", total=len(batches)) + # ) + # floor_results, all_floors = zip(*floor_returns) + all_img_ceilings = {} + #all_img_floors = {} + for img_ceilings in all_ceilings: + for img_name, score in img_ceilings.items(): + all_img_ceilings[img_name] = score + # for img_ceilings in all_floors: + # for img_name, score in img_ceilings.items(): + # all_img_floors[img_name] = score + # Flatten the results + all_ceilings = np.concatenate(ceiling_results) + # all_floors = np.concatenate(floor_results) + + # Compute the mean of the ceilings and floors + mean_ceiling = all_ceilings.mean() + # mean_floor = all_floors.mean() + + # Compute the ratio of the mean of the ceilings to the mean of the floors + # ratio = mean_ceiling / mean_floor + # print(f"Mean ceiling: {mean_ceiling}, Mean floor: {mean_floor}, Ratio: {ratio}") + + # Save the results + np.savez( + os.path.join(output_dir, f"{config['experiment_name']}_{config['metric']}_ceiling_results.npz"), + mean_ceiling=mean_ceiling, + all_ceilings=all_ceilings, + all_img_ceilings=all_img_ceilings) + if config['save_json']: + # Save as json + with open(os.path.join(output_dir, f"{config['experiment_name']}_{config['metric']}_ceiling_results.json"), 'w') as f: + output_json = {"all_imgs": final_keep_index, 'mean_ceiling':mean_ceiling, + 'all_ceilings':all_ceilings, 'all_img_ceilings':all_img_ceilings} + for key, value in output_json.items(): + if 
isinstance(value, np.ndarray): + output_json[key] = value.tolist() + output_content = json.dumps(output_json, indent=4) + f.write(output_content) + # np.savez( + # os.path.join(output_dir, f"{config['experiment_name']}_{config['metric']}_ceiling_floor_results.npz"), + # mean_ceiling=mean_ceiling, + # mean_floor=mean_floor, + # all_ceilings=all_ceilings, + # all_floors=all_floors, + # all_img_ceilings=all_img_ceilings, + # all_img_floors=all_img_floors, + # ratio=ratio) + # if config['save_json']: + # # Save as json + # with open(os.path.join(output_dir, f"{config['experiment_name']}_{config['metric']}_ceiling_floor_results.json"), 'w') as f: + # output_json = {"all_imgs": final_keep_index, 'mean_ceiling':mean_ceiling, 'mean_floor':mean_floor, + # 'all_ceilings':all_ceilings, 'all_floors':all_floors, 'all_img_ceilings':all_img_ceilings, + # 'all_img_floors':all_img_floors} + # for key, value in output_json.items(): + # if isinstance(value, np.ndarray): + # output_json[key] = value.tolist() + # output_content = json.dumps(output_json, indent=4) + # f.write(output_content) + + # Delete temp file to save disk + if os.path.exists(temp_dir): + os.remove(temp_dir) + + # End profiling if it was enabled + if args.profile: + profiler.disable() + import pstats + from io import StringIO + s = StringIO() + ps = pstats.Stats(profiler, stream=s).sort_stats('cumulative') + ps.print_stats(30) # Print top 30 functions by time + print(s.getvalue()) + + # Save profile results to file + ps.dump_stats(os.path.join(output_dir, "profile_results.prof")) + print(f"Profile results saved to {os.path.join(output_dir, 'profile_results.prof')}") + + print(f"\nProcessing complete! Final memory usage: {utils.get_memory_usage():.2f} MB") diff --git a/configs/balance_exp_configs/imagenet_val_oscar_max_10.yaml b/configs/balance_exp_configs/imagenet_val_oscar_max_10.yaml new file mode 100644 index 0000000..2b43c31 --- /dev/null +++ b/configs/balance_exp_configs/imagenet_val_oscar_max_10.yaml @@ -0,0 +1,46 @@ +assets: assets +temp_dir: temp +blur_size: 21 +center_crop: +- 224 +- 224 +class_filter_file: false +clickme_data: clickme_datasets/sampled_imgnet_val.csv +correlation_batch_size: 1024 +debug: false +display_image_keys: auto +example_image_output_dir: jay_imagenet_val_combined_08_27_2025_images +experiment_name: balanced_exp_10_subjects_08_27_2025 +file_exclusion_filter: null +file_inclusion_filter: ILSVRC2012_val +filter_mobile: true +gpu_batch_size: 4096 +image_path: /gpfs/data/shared/imagenet/ILSVRC2012/val +image_shape: +- 256 +- 256 +mask_dir: null +mask_threshold: 0 +max_clicks: 1000000 +metadata_file: image_metadata/jay_imagenet_val_04_30_2025_dimensions.npy +metric: spearman +min_clicks: 1 +min_subjects: 20 +max_subjects: 10 +n_jobs: -1 +null_iterations: 50 +parallel_prepare_maps: true +parallel_save: false +participant_filter: false +percentile_thresh: 50 +preprocess_db_data: true +processed_clickme_file: jay_imagenet_val_combined_08_27_2025_processed.npz +processed_medians: jay_imagenet_val_combined_08_27_2025_medians.json +remove_string: imagenet/val/ +time_based_bins: true +multi_thresh_gpu: multi_thresh_gpu +output_format: "hdf5" +processed_clickmap_bins: jay_imagenet_val_combined_08_27_2025_clickmap_bins.npy +chunk_size: 100000 +batch_size: 14000 +save_json: true diff --git a/configs/balance_exp_configs/imagenet_val_oscar_max_15.yaml b/configs/balance_exp_configs/imagenet_val_oscar_max_15.yaml new file mode 100644 index 0000000..8b930af --- /dev/null +++ 
b/configs/balance_exp_configs/imagenet_val_oscar_max_15.yaml @@ -0,0 +1,46 @@ +assets: assets +temp_dir: temp +blur_size: 21 +center_crop: +- 224 +- 224 +class_filter_file: false +clickme_data: clickme_datasets/sampled_imgnet_val.csv +correlation_batch_size: 1024 +debug: false +display_image_keys: auto +example_image_output_dir: jay_imagenet_val_combined_08_27_2025_images +experiment_name: balanced_exp_15_subjects_08_27_2025 +file_exclusion_filter: null +file_inclusion_filter: ILSVRC2012_val +filter_mobile: true +gpu_batch_size: 4096 +image_path: /gpfs/data/shared/imagenet/ILSVRC2012/val +image_shape: +- 256 +- 256 +mask_dir: null +mask_threshold: 0 +max_clicks: 1000000 +metadata_file: image_metadata/jay_imagenet_val_04_30_2025_dimensions.npy +metric: spearman +min_clicks: 1 +min_subjects: 20 +max_subjects: 15 +n_jobs: -1 +null_iterations: 50 +parallel_prepare_maps: true +parallel_save: false +participant_filter: false +percentile_thresh: 50 +preprocess_db_data: true +processed_clickme_file: jay_imagenet_val_combined_08_27_2025_processed.npz +processed_medians: jay_imagenet_val_combined_08_27_2025_medians.json +remove_string: imagenet/val/ +time_based_bins: true +multi_thresh_gpu: multi_thresh_gpu +output_format: "hdf5" +processed_clickmap_bins: jay_imagenet_val_combined_08_27_2025_clickmap_bins.npy +chunk_size: 100000 +batch_size: 14000 +save_json: true diff --git a/configs/balance_exp_configs/imagenet_val_oscar_max_20.yaml b/configs/balance_exp_configs/imagenet_val_oscar_max_20.yaml new file mode 100644 index 0000000..40e5684 --- /dev/null +++ b/configs/balance_exp_configs/imagenet_val_oscar_max_20.yaml @@ -0,0 +1,46 @@ +assets: assets +temp_dir: temp +blur_size: 21 +center_crop: +- 224 +- 224 +class_filter_file: false +clickme_data: clickme_datasets/sampled_imgnet_val.csv +correlation_batch_size: 1024 +debug: false +display_image_keys: auto +example_image_output_dir: jay_imagenet_val_combined_08_27_2025_images +experiment_name: balanced_exp_20_subjects_08_27_2025 +file_exclusion_filter: null +file_inclusion_filter: ILSVRC2012_val +filter_mobile: true +gpu_batch_size: 4096 +image_path: /gpfs/data/shared/imagenet/ILSVRC2012/val +image_shape: +- 256 +- 256 +mask_dir: null +mask_threshold: 0 +max_clicks: 1000000 +metadata_file: image_metadata/jay_imagenet_val_04_30_2025_dimensions.npy +metric: spearman +min_clicks: 1 +min_subjects: 20 +max_subjects: 20 +n_jobs: -1 +null_iterations: 50 +parallel_prepare_maps: true +parallel_save: false +participant_filter: false +percentile_thresh: 50 +preprocess_db_data: true +processed_clickme_file: jay_imagenet_val_combined_08_27_2025_processed.npz +processed_medians: jay_imagenet_val_combined_08_27_2025_medians.json +remove_string: imagenet/val/ +time_based_bins: true +multi_thresh_gpu: multi_thresh_gpu +output_format: "hdf5" +processed_clickmap_bins: jay_imagenet_val_combined_08_27_2025_clickmap_bins.npy +chunk_size: 100000 +batch_size: 14000 +save_json: true diff --git a/configs/balance_exp_configs/imagenet_val_oscar_max_5.yaml b/configs/balance_exp_configs/imagenet_val_oscar_max_5.yaml new file mode 100644 index 0000000..a7e05e4 --- /dev/null +++ b/configs/balance_exp_configs/imagenet_val_oscar_max_5.yaml @@ -0,0 +1,46 @@ +assets: assets +temp_dir: temp +blur_size: 21 +center_crop: +- 224 +- 224 +class_filter_file: false +clickme_data: clickme_datasets/sampled_imgnet_val.csv +correlation_batch_size: 1024 +debug: false +display_image_keys: auto +example_image_output_dir: jay_imagenet_val_combined_08_27_2025_images +experiment_name: 
balanced_exp_5_subjects_08_27_2025 +file_exclusion_filter: null +file_inclusion_filter: ILSVRC2012_val +filter_mobile: true +gpu_batch_size: 4096 +image_path: /gpfs/data/shared/imagenet/ILSVRC2012/val +image_shape: +- 256 +- 256 +mask_dir: null +mask_threshold: 0 +max_clicks: 1000000 +metadata_file: image_metadata/jay_imagenet_val_04_30_2025_dimensions.npy +metric: spearman +min_clicks: 1 +min_subjects: 20 +max_subjects: 5 +n_jobs: -1 +null_iterations: 50 +parallel_prepare_maps: true +parallel_save: false +participant_filter: false +percentile_thresh: 50 +preprocess_db_data: true +processed_clickme_file: jay_imagenet_val_combined_08_27_2025_processed.npz +processed_medians: jay_imagenet_val_combined_08_27_2025_medians.json +remove_string: imagenet/val/ +time_based_bins: true +multi_thresh_gpu: multi_thresh_gpu +output_format: "hdf5" +processed_clickmap_bins: jay_imagenet_val_combined_08_27_2025_clickmap_bins.npy +chunk_size: 100000 +batch_size: 14000 +save_json: true diff --git a/configs/co3d_train.yaml b/configs/co3d_configs/co3d_train.yaml similarity index 100% rename from configs/co3d_train.yaml rename to configs/co3d_configs/co3d_train.yaml diff --git a/configs/co3d_train_oscar.yaml b/configs/co3d_configs/co3d_train_oscar.yaml similarity index 100% rename from configs/co3d_train_oscar.yaml rename to configs/co3d_configs/co3d_train_oscar.yaml diff --git a/configs/co3d_val.yaml b/configs/co3d_configs/co3d_val.yaml similarity index 100% rename from configs/co3d_val.yaml rename to configs/co3d_configs/co3d_val.yaml diff --git a/configs/co3d_val_oscar.yaml b/configs/co3d_configs/co3d_val_auc_oscar.yaml similarity index 100% rename from configs/co3d_val_oscar.yaml rename to configs/co3d_configs/co3d_val_auc_oscar.yaml diff --git a/configs/co3d_configs/co3d_val_spearman_oscar.yaml b/configs/co3d_configs/co3d_val_spearman_oscar.yaml new file mode 100644 index 0000000..498580e --- /dev/null +++ b/configs/co3d_configs/co3d_val_spearman_oscar.yaml @@ -0,0 +1,41 @@ +assets: assets +blur_size: 21 +center_crop: +- 224 +- 224 +class_filter_file: false +# clickme_data: clickme_datasets/dump_co3d_combined_03_13_2025.npz +clickme_data: /cifs/data/tserre_lrs/projects/projects/prj_video_imagenet/human_clickme_data_processing/clickme_datasets/dump_co3d_05_28_2025_unique.npz +correlation_batch_size: 1024 +debug: false +display_image_keys: auto +example_image_output_dir: co3d_val_images +experiment_name: co3d_val +file_exclusion_filter: ILSVRC2012_val +file_inclusion_filter: CO3D_ClickMe2 +filter_mobile: true +gpu_batch_size: 4096 +image_path: /cifs/data/tserre_lrs/projects/projects/prj_video_imagenet/CO3D_ClickMe2 +image_shape: +- 256 +- 256 +mask_dir: null +mask_threshold: 0. 
+max_clicks: 1000000 # Set to an impossibly high number to keep all maps +metadata_file: false +metric: spearman +min_clicks: 1 +min_subjects: 6 +n_jobs: -1 +null_iterations: 100 +parallel_prepare_maps: true +parallel_save: true +participant_filter: false +percentile_thresh: 50 +preprocess_db_data: false +processed_clickme_file: co3d_val_processed.npz +processed_clickmap_bins: co3d_val_clickmap_bins.npy +processed_medians: co3d_val_medians.json +remove_string: false +multi_thresh_gpu: true +time_based_bins: true \ No newline at end of file diff --git a/configs/co3d_constancy_val.yaml b/configs/co3d_constancy_configs/co3d_constancy_val.yaml similarity index 100% rename from configs/co3d_constancy_val.yaml rename to configs/co3d_constancy_configs/co3d_constancy_val.yaml diff --git a/configs/co3d_constancy_val_oscar.yaml b/configs/co3d_constancy_configs/co3d_constancy_val_oscar.yaml similarity index 100% rename from configs/co3d_constancy_val_oscar.yaml rename to configs/co3d_constancy_configs/co3d_constancy_val_oscar.yaml diff --git a/configs/constancy_ceiling_auc.yaml b/configs/co3d_constancy_configs/constancy_ceiling_auc.yaml similarity index 97% rename from configs/constancy_ceiling_auc.yaml rename to configs/co3d_constancy_configs/constancy_ceiling_auc.yaml index 45668c0..4040c12 100644 --- a/configs/constancy_ceiling_auc.yaml +++ b/configs/co3d_constancy_configs/constancy_ceiling_auc.yaml @@ -10,7 +10,7 @@ correlation_batch_size: 128 debug: false display_image_keys: auto example_image_output_dir: co3d_constancy_val_images -experiment_name: co3d_constancy_val +experiment_name: co3d_constancy_val_obs file_exclusion_filter: ILSVRC2012_val file_inclusion_filter: CO3D_Constancy # CO3D_ClickMe2 filter_mobile: true diff --git a/configs/co3d_constancy_configs/constancy_ceiling_emd.yaml b/configs/co3d_constancy_configs/constancy_ceiling_emd.yaml new file mode 100644 index 0000000..b7a5557 --- /dev/null +++ b/configs/co3d_constancy_configs/constancy_ceiling_emd.yaml @@ -0,0 +1,45 @@ +assets: assets +blur_size: 21 +center_crop: +- 224 +- 224 +class_filter_file: false +# clickme_data: clickme_datasets/dump_co3d_combined_03_13_2025.npz +clickme_data: /cifs/data/tserre_lrs/projects/projects/prj_video_imagenet/human_clickme_data_processing/clickme_datasets/dump_co3d_05_28_2025_unique.npz +correlation_batch_size: 128 +debug: false +display_image_keys: auto +example_image_output_dir: co3d_constancy_val_images +experiment_name: co3d_constancy_val_obs +file_exclusion_filter: ILSVRC2012_val +file_inclusion_filter: CO3D_Constancy # CO3D_ClickMe2 +filter_mobile: true +gpu_batch_size: 4096 +image_path: /cifs/data/tserre_lrs/projects/projects/prj_video_imagenet/PeRFception/data/co3d_v2/clickme_trajectories/ +image_shape: +- 256 +- 256 +mask_dir: null +mask_threshold: 0. 
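+# Identical to the rank_cosine and rank_pearson ceiling configs that follow except for the metric field (emd here).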
+max_clicks: 1000000 # Set to an impossibly high number to keep all maps +metadata_file: false +metric: emd +min_clicks: 1 +min_subjects: 4 +n_jobs: -1 +null_iterations: 20 # Potentially lower this number for speed up +parallel_prepare_maps: true +parallel_save: true +participant_filter: false +percentile_thresh: 50 +preprocess_db_data: false +processed_clickme_file: co3d_constancy_val_processed.npz +processed_clickmap_bins: co3d_constancy_val_clickmap_bins.npy +processed_medians: co3d_constancy_val_medians.json +remove_string: false +multi_thresh_gpu: true +constancy: true +camera_path: clickme_datasets/constancy_params.json +depth_path: /oscar/data/tserre/Users/pzhou10/CVM/onevision/assets/constancy_depths +save_json: true +time_based_bins: true diff --git a/configs/co3d_constancy_configs/constancy_ceiling_rank_cosine.yaml b/configs/co3d_constancy_configs/constancy_ceiling_rank_cosine.yaml new file mode 100644 index 0000000..2848284 --- /dev/null +++ b/configs/co3d_constancy_configs/constancy_ceiling_rank_cosine.yaml @@ -0,0 +1,45 @@ +assets: assets +blur_size: 21 +center_crop: +- 224 +- 224 +class_filter_file: false +# clickme_data: clickme_datasets/dump_co3d_combined_03_13_2025.npz +clickme_data: /cifs/data/tserre_lrs/projects/projects/prj_video_imagenet/human_clickme_data_processing/clickme_datasets/dump_co3d_05_28_2025_unique.npz +correlation_batch_size: 128 +debug: false +display_image_keys: auto +example_image_output_dir: co3d_constancy_val_images +experiment_name: co3d_constancy_val_obs +file_exclusion_filter: ILSVRC2012_val +file_inclusion_filter: CO3D_Constancy # CO3D_ClickMe2 +filter_mobile: true +gpu_batch_size: 4096 +image_path: /cifs/data/tserre_lrs/projects/projects/prj_video_imagenet/PeRFception/data/co3d_v2/clickme_trajectories/ +image_shape: +- 256 +- 256 +mask_dir: null +mask_threshold: 0. 
+max_clicks: 1000000 # Set to an impossibly high number to keep all maps +metadata_file: false +metric: rank_cosine +min_clicks: 1 +min_subjects: 4 +n_jobs: -1 +null_iterations: 20 # Potentially lower this number for speed up +parallel_prepare_maps: true +parallel_save: true +participant_filter: false +percentile_thresh: 50 +preprocess_db_data: false +processed_clickme_file: co3d_constancy_val_processed.npz +processed_clickmap_bins: co3d_constancy_val_clickmap_bins.npy +processed_medians: co3d_constancy_val_medians.json +remove_string: false +multi_thresh_gpu: true +constancy: true +camera_path: clickme_datasets/constancy_params.json +depth_path: /oscar/data/tserre/Users/pzhou10/CVM/onevision/assets/constancy_depths +save_json: true +time_based_bins: true diff --git a/configs/co3d_constancy_configs/constancy_ceiling_rank_pearson.yaml b/configs/co3d_constancy_configs/constancy_ceiling_rank_pearson.yaml new file mode 100644 index 0000000..5c838d6 --- /dev/null +++ b/configs/co3d_constancy_configs/constancy_ceiling_rank_pearson.yaml @@ -0,0 +1,45 @@ +assets: assets +blur_size: 21 +center_crop: +- 224 +- 224 +class_filter_file: false +# clickme_data: clickme_datasets/dump_co3d_combined_03_13_2025.npz +clickme_data: /cifs/data/tserre_lrs/projects/projects/prj_video_imagenet/human_clickme_data_processing/clickme_datasets/dump_co3d_05_28_2025_unique.npz +correlation_batch_size: 128 +debug: false +display_image_keys: auto +example_image_output_dir: co3d_constancy_val_images +experiment_name: co3d_constancy_val_obs +file_exclusion_filter: ILSVRC2012_val +file_inclusion_filter: CO3D_Constancy # CO3D_ClickMe2 +filter_mobile: true +gpu_batch_size: 4096 +image_path: /cifs/data/tserre_lrs/projects/projects/prj_video_imagenet/PeRFception/data/co3d_v2/clickme_trajectories/ +image_shape: +- 256 +- 256 +mask_dir: null +mask_threshold: 0. 
+max_clicks: 1000000 # Set to an impossibly high number to keep all maps +metadata_file: false +metric: rank_pearson +min_clicks: 1 +min_subjects: 4 +n_jobs: -1 +null_iterations: 20 # Potentially lower this number for speed up +parallel_prepare_maps: true +parallel_save: true +participant_filter: false +percentile_thresh: 50 +preprocess_db_data: false +processed_clickme_file: co3d_constancy_val_processed.npz +processed_clickmap_bins: co3d_constancy_val_clickmap_bins.npy +processed_medians: co3d_constancy_val_medians.json +remove_string: false +multi_thresh_gpu: true +constancy: true +camera_path: clickme_datasets/constancy_params.json +depth_path: /oscar/data/tserre/Users/pzhou10/CVM/onevision/assets/constancy_depths +save_json: true +time_based_bins: true diff --git a/configs/constancy_ceiling_spearman.yaml b/configs/co3d_constancy_configs/constancy_ceiling_spearman.yaml similarity index 97% rename from configs/constancy_ceiling_spearman.yaml rename to configs/co3d_constancy_configs/constancy_ceiling_spearman.yaml index 06fff09..6ab3bf2 100644 --- a/configs/constancy_ceiling_spearman.yaml +++ b/configs/co3d_constancy_configs/constancy_ceiling_spearman.yaml @@ -10,7 +10,7 @@ correlation_batch_size: 128 debug: false display_image_keys: auto example_image_output_dir: co3d_constancy_val_images -experiment_name: co3d_constancy_val +experiment_name: co3d_constancy_val_obs file_exclusion_filter: ILSVRC2012_val file_inclusion_filter: CO3D_Constancy # CO3D_ClickMe2 filter_mobile: true diff --git a/configs/exp_configs/imagenet_val_oscar_max_10.yaml b/configs/exp_configs/imagenet_val_oscar_max_10.yaml new file mode 100644 index 0000000..8c68fa9 --- /dev/null +++ b/configs/exp_configs/imagenet_val_oscar_max_10.yaml @@ -0,0 +1,46 @@ +assets: assets +temp_dir: temp +blur_size: 21 +center_crop: +- 224 +- 224 +class_filter_file: false +clickme_data: /cifs/data/tserre_lrs/projects/projects/prj_video_imagenet/human_clickme_data_processing/clickme_datasets/val_combined_08_27_2025.npz +correlation_batch_size: 1024 +debug: false +display_image_keys: auto +example_image_output_dir: jay_imagenet_val_combined_08_27_2025_images +experiment_name: exp_10_subjects_08_27_2025 +file_exclusion_filter: null +file_inclusion_filter: ILSVRC2012_val +filter_mobile: true +gpu_batch_size: 4096 +image_path: /gpfs/data/shared/imagenet/ILSVRC2012/val +image_shape: +- 256 +- 256 +mask_dir: null +mask_threshold: 0 +max_clicks: 1000000 +metadata_file: image_metadata/jay_imagenet_val_04_30_2025_dimensions.npy +metric: spearman +min_clicks: 1 +min_subjects: 30 +max_subjects: 10 +n_jobs: -1 +null_iterations: 50 +parallel_prepare_maps: true +parallel_save: false +participant_filter: false +percentile_thresh: 50 +preprocess_db_data: true +processed_clickme_file: jay_imagenet_val_combined_08_27_2025_processed.npz +processed_medians: jay_imagenet_val_combined_08_27_2025_medians.json +remove_string: imagenet/val/ +time_based_bins: true +multi_thresh_gpu: multi_thresh_gpu +output_format: "hdf5" +processed_clickmap_bins: jay_imagenet_val_combined_08_27_2025_clickmap_bins.npy +chunk_size: 100000 +batch_size: 14000 +save_json: true diff --git a/configs/exp_configs/imagenet_val_oscar_max_15.yaml b/configs/exp_configs/imagenet_val_oscar_max_15.yaml new file mode 100644 index 0000000..36d9eb1 --- /dev/null +++ b/configs/exp_configs/imagenet_val_oscar_max_15.yaml @@ -0,0 +1,46 @@ +assets: assets +temp_dir: temp +blur_size: 21 +center_crop: +- 224 +- 224 +class_filter_file: false +clickme_data: 
/cifs/data/tserre_lrs/projects/projects/prj_video_imagenet/human_clickme_data_processing/clickme_datasets/val_combined_08_27_2025.npz +correlation_batch_size: 1024 +debug: false +display_image_keys: auto +example_image_output_dir: jay_imagenet_val_combined_08_27_2025_images +experiment_name: exp_15_subjects_08_27_2025 +file_exclusion_filter: null +file_inclusion_filter: ILSVRC2012_val +filter_mobile: true +gpu_batch_size: 4096 +image_path: /gpfs/data/shared/imagenet/ILSVRC2012/val +image_shape: +- 256 +- 256 +mask_dir: null +mask_threshold: 0 +max_clicks: 1000000 +metadata_file: image_metadata/jay_imagenet_val_04_30_2025_dimensions.npy +metric: spearman +min_clicks: 1 +min_subjects: 30 +max_subjects: 15 +n_jobs: -1 +null_iterations: 50 +parallel_prepare_maps: true +parallel_save: false +participant_filter: false +percentile_thresh: 50 +preprocess_db_data: true +processed_clickme_file: jay_imagenet_val_combined_08_27_2025_processed.npz +processed_medians: jay_imagenet_val_combined_08_27_2025_medians.json +remove_string: imagenet/val/ +time_based_bins: true +multi_thresh_gpu: multi_thresh_gpu +output_format: "hdf5" +processed_clickmap_bins: jay_imagenet_val_combined_08_27_2025_clickmap_bins.npy +chunk_size: 100000 +batch_size: 14000 +save_json: true diff --git a/configs/exp_configs/imagenet_val_oscar_max_20.yaml b/configs/exp_configs/imagenet_val_oscar_max_20.yaml new file mode 100644 index 0000000..c87fb70 --- /dev/null +++ b/configs/exp_configs/imagenet_val_oscar_max_20.yaml @@ -0,0 +1,46 @@ +assets: assets +temp_dir: temp +blur_size: 21 +center_crop: +- 224 +- 224 +class_filter_file: false +clickme_data: /cifs/data/tserre_lrs/projects/projects/prj_video_imagenet/human_clickme_data_processing/clickme_datasets/val_combined_08_27_2025.npz +correlation_batch_size: 1024 +debug: false +display_image_keys: auto +example_image_output_dir: jay_imagenet_val_combined_08_27_2025_images +experiment_name: exp_20_subjects_08_27_2025 +file_exclusion_filter: null +file_inclusion_filter: ILSVRC2012_val +filter_mobile: true +gpu_batch_size: 4096 +image_path: /gpfs/data/shared/imagenet/ILSVRC2012/val +image_shape: +- 256 +- 256 +mask_dir: null +mask_threshold: 0 +max_clicks: 1000000 +metadata_file: image_metadata/jay_imagenet_val_04_30_2025_dimensions.npy +metric: spearman +min_clicks: 1 +min_subjects: 30 +max_subjects: 20 +n_jobs: -1 +null_iterations: 50 +parallel_prepare_maps: true +parallel_save: false +participant_filter: false +percentile_thresh: 50 +preprocess_db_data: true +processed_clickme_file: jay_imagenet_val_combined_08_27_2025_processed.npz +processed_medians: jay_imagenet_val_combined_08_27_2025_medians.json +remove_string: imagenet/val/ +time_based_bins: true +multi_thresh_gpu: multi_thresh_gpu +output_format: "hdf5" +processed_clickmap_bins: jay_imagenet_val_combined_08_27_2025_clickmap_bins.npy +chunk_size: 100000 +batch_size: 14000 +save_json: true diff --git a/configs/exp_configs/imagenet_val_oscar_max_25.yaml b/configs/exp_configs/imagenet_val_oscar_max_25.yaml new file mode 100644 index 0000000..fc70d79 --- /dev/null +++ b/configs/exp_configs/imagenet_val_oscar_max_25.yaml @@ -0,0 +1,46 @@ +assets: assets +temp_dir: temp +blur_size: 21 +center_crop: +- 224 +- 224 +class_filter_file: false +clickme_data: /cifs/data/tserre_lrs/projects/projects/prj_video_imagenet/human_clickme_data_processing/clickme_datasets/val_combined_08_27_2025.npz +correlation_batch_size: 1024 +debug: false +display_image_keys: auto +example_image_output_dir: jay_imagenet_val_combined_08_27_2025_images 
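+# experiment_name mirrors the max_subjects setting; across these exp_configs the cap is swept from 5 to 30 while min_subjects stays at 30.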
+experiment_name: exp_25_subjects_08_27_2025 +file_exclusion_filter: null +file_inclusion_filter: ILSVRC2012_val +filter_mobile: true +gpu_batch_size: 4096 +image_path: /gpfs/data/shared/imagenet/ILSVRC2012/val +image_shape: +- 256 +- 256 +mask_dir: null +mask_threshold: 0 +max_clicks: 1000000 +metadata_file: image_metadata/jay_imagenet_val_04_30_2025_dimensions.npy +metric: spearman +min_clicks: 1 +min_subjects: 30 +max_subjects: 25 +n_jobs: -1 +null_iterations: 50 +parallel_prepare_maps: true +parallel_save: false +participant_filter: false +percentile_thresh: 50 +preprocess_db_data: true +processed_clickme_file: jay_imagenet_val_combined_08_27_2025_processed.npz +processed_medians: jay_imagenet_val_combined_08_27_2025_medians.json +remove_string: imagenet/val/ +time_based_bins: true +multi_thresh_gpu: multi_thresh_gpu +output_format: "hdf5" +processed_clickmap_bins: jay_imagenet_val_combined_08_27_2025_clickmap_bins.npy +chunk_size: 100000 +batch_size: 14000 +save_json: true diff --git a/configs/exp_configs/imagenet_val_oscar_max_30.yaml b/configs/exp_configs/imagenet_val_oscar_max_30.yaml new file mode 100644 index 0000000..afea899 --- /dev/null +++ b/configs/exp_configs/imagenet_val_oscar_max_30.yaml @@ -0,0 +1,46 @@ +assets: assets +temp_dir: temp +blur_size: 21 +center_crop: +- 224 +- 224 +class_filter_file: false +clickme_data: /cifs/data/tserre_lrs/projects/projects/prj_video_imagenet/human_clickme_data_processing/clickme_datasets/val_combined_08_27_2025.npz +correlation_batch_size: 1024 +debug: false +display_image_keys: auto +example_image_output_dir: jay_imagenet_val_combined_08_27_2025_images +experiment_name: exp_30_subjects_08_27_2025 +file_exclusion_filter: null +file_inclusion_filter: ILSVRC2012_val +filter_mobile: true +gpu_batch_size: 4096 +image_path: /gpfs/data/shared/imagenet/ILSVRC2012/val +image_shape: +- 256 +- 256 +mask_dir: null +mask_threshold: 0 +max_clicks: 1000000 +metadata_file: image_metadata/jay_imagenet_val_04_30_2025_dimensions.npy +metric: spearman +min_clicks: 1 +min_subjects: 30 +max_subjects: 30 +n_jobs: -1 +null_iterations: 50 +parallel_prepare_maps: true +parallel_save: false +participant_filter: false +percentile_thresh: 50 +preprocess_db_data: true +processed_clickme_file: jay_imagenet_val_combined_08_27_2025_processed.npz +processed_medians: jay_imagenet_val_combined_08_27_2025_medians.json +remove_string: imagenet/val/ +time_based_bins: true +multi_thresh_gpu: multi_thresh_gpu +output_format: "hdf5" +processed_clickmap_bins: jay_imagenet_val_combined_08_27_2025_clickmap_bins.npy +chunk_size: 100000 +batch_size: 14000 +save_json: true diff --git a/configs/exp_configs/imagenet_val_oscar_max_5.yaml b/configs/exp_configs/imagenet_val_oscar_max_5.yaml new file mode 100644 index 0000000..c172ccc --- /dev/null +++ b/configs/exp_configs/imagenet_val_oscar_max_5.yaml @@ -0,0 +1,46 @@ +assets: assets +temp_dir: temp +blur_size: 21 +center_crop: +- 224 +- 224 +class_filter_file: false +clickme_data: /cifs/data/tserre_lrs/projects/projects/prj_video_imagenet/human_clickme_data_processing/clickme_datasets/val_combined_08_27_2025.npz +correlation_batch_size: 1024 +debug: false +display_image_keys: auto +example_image_output_dir: jay_imagenet_val_combined_08_27_2025_images +experiment_name: exp_5_subjects_08_27_2025 +file_exclusion_filter: null +file_inclusion_filter: ILSVRC2012_val +filter_mobile: true +gpu_batch_size: 4096 +image_path: /gpfs/data/shared/imagenet/ILSVRC2012/val +image_shape: +- 256 +- 256 +mask_dir: null +mask_threshold: 0 +max_clicks: 2048 
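+# NOTE: the sibling max_* configs set max_clicks to 1000000 (effectively unlimited); this file caps clicks at 2048.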
+metadata_file: image_metadata/jay_imagenet_val_04_30_2025_dimensions.npy +metric: spearman +min_clicks: 1 +min_subjects: 30 +max_subjects: 5 +n_jobs: -1 +null_iterations: 50 +parallel_prepare_maps: true +parallel_save: false +participant_filter: false +percentile_thresh: 50 +preprocess_db_data: true +processed_clickme_file: jay_imagenet_val_combined_08_27_2025_processed.npz +processed_medians: jay_imagenet_val_combined_08_27_2025_medians.json +remove_string: imagenet/val/ +time_based_bins: true +multi_thresh_gpu: multi_thresh_gpu +output_format: "hdf5" +processed_clickmap_bins: jay_imagenet_val_combined_08_27_2025_clickmap_bins.npy +chunk_size: 100000 +batch_size: 14000 +save_json: true diff --git a/configs/imagenet_co3d_val_oscar.yaml b/configs/imgnet_configs/imagenet_co3d_val_oscar.yaml similarity index 64% rename from configs/imagenet_co3d_val_oscar.yaml rename to configs/imgnet_configs/imagenet_co3d_val_oscar.yaml index 2dd1bfc..e5681f2 100644 --- a/configs/imagenet_co3d_val_oscar.yaml +++ b/configs/imgnet_configs/imagenet_co3d_val_oscar.yaml @@ -4,17 +4,17 @@ center_crop: - 224 - 224 class_filter_file: category_maps/synset_to_co3d.npy -clickme_data: /cifs/data/tserre_lrs/projects/projects/prj_video_imagenet/human_clickme_data_processing/clickme_datasets/val_combined_07_09_2025.npz +clickme_data: /cifs/data/tserre_lrs/projects/projects/prj_video_imagenet/human_clickme_data_processing/clickme_datasets/val_combined_08_27_2025.npz correlation_batch_size: 1024 debug: false display_image_keys: auto -example_image_output_dir: jay_imagenet_co3d_val_04_30_2025_images -experiment_name: jay_imagenet_co3d_val_07_09_2025 -file_exclusion_filter: ILSVRC2012_train +example_image_output_dir: jay_imagenet_co3d_val_08_27_2025_images +experiment_name: jay_imagenet_co3d_val_08_27_2025 +file_exclusion_filter: null file_inclusion_filter: ILSVRC2012_val filter_mobile: true gpu_batch_size: 4096 -image_path: /cifs/data/tserre_lrs/projects/projects/prj_video_imagenet/imagenet/ILSVRC/Data/CLS-LOC/val2 +image_path: /gpfs/data/shared/imagenet/ILSVRC2012/val image_shape: - 256 - 256 @@ -32,9 +32,9 @@ parallel_save: true participant_filter: false percentile_thresh: 50 preprocess_db_data: true -processed_clickme_file: jay_imagenet_co3d_val_04_30_2025_processed.npz -processed_clickmap_bins: jay_imagenet_co3d_val_04_30_2025_clickmap_bins.npy -processed_medians: jay_imagenet_co3d_val_04_30_2025_medians.json +processed_clickme_file: jay_imagenet_co3d_val_08_27_2025_processed.npz +processed_clickmap_bins: jay_imagenet_co3d_val_08_27_2025_clickmap_bins.npy +processed_medians: jay_imagenet_co3d_val_08_27_2025_medians.json remove_string: imagenet/val/ output_format: "numpy" time_based_bins: true diff --git a/configs/imagenet_train_oscar.yaml b/configs/imgnet_configs/imagenet_train_oscar.yaml similarity index 68% rename from configs/imagenet_train_oscar.yaml rename to configs/imgnet_configs/imagenet_train_oscar.yaml index 9f3f985..e796760 100644 --- a/configs/imagenet_train_oscar.yaml +++ b/configs/imgnet_configs/imagenet_train_oscar.yaml @@ -4,17 +4,17 @@ center_crop: - 224 - 224 class_filter_file: false -clickme_data: /cifs/data/tserre_lrs/projects/projects/prj_video_imagenet/human_clickme_data_processing/clickme_datasets/train_combined_07_09_2025.npz +clickme_data: /cifs/data/tserre_lrs/projects/projects/prj_video_imagenet/human_clickme_data_processing/clickme_datasets/train_combined_08_27_2025.npz correlation_batch_size: 4000 debug: false display_image_keys: auto -example_image_output_dir: 
jay_imagenet_train_combined_07_09_2025_images -experiment_name: jay_imagenet_train_07_09_2025 +example_image_output_dir: jay_imagenet_train_combined_08_27_2025_images +experiment_name: jay_imagenet_train_08_27_2025 file_exclusion_filter: ILSVRC2012_val file_inclusion_filter: null filter_mobile: true gpu_batch_size: 4096 -image_path: /cifs/data/tserre_lrs/projects/projects/prj_video_imagenet/imagenet/ILSVRC/Data/CLS-LOC/train +image_path: /gpfs/data/shared/imagenet/ILSVRC2012/train image_shape: - 256 - 256 @@ -32,8 +32,8 @@ parallel_save: false participant_filter: false percentile_thresh: 50 preprocess_db_data: true -processed_clickme_file: jay_imagenet_train_combined_07_09_2025_processed.npz -processed_medians: jay_imagenet_train_combined_07_09_2025_medians.json +processed_clickme_file: jay_imagenet_train_combined_08_27_2025_processed.npz +processed_medians: jay_imagenet_train_combined_08_27_2025_medians.json remove_string: imagenet/train/ output_format: "hdf5" use_cython: true @@ -41,4 +41,4 @@ chunk_size: 100000 batch_size: 14000 time_based_bins: true multi_thresh_gpu: multi_thresh_gpu -processed_clickmap_bins: jay_imagenet_train_combined_07_09_2025_clickmap_bins.npy +processed_clickmap_bins: jay_imagenet_train_combined_08_27_2025_clickmap_bins.npy diff --git a/configs/imagenet_val_oscar.yaml b/configs/imgnet_configs/imagenet_val_oscar.yaml similarity index 64% rename from configs/imagenet_val_oscar.yaml rename to configs/imgnet_configs/imagenet_val_oscar.yaml index a17b004..eab86cb 100644 --- a/configs/imagenet_val_oscar.yaml +++ b/configs/imgnet_configs/imagenet_val_oscar.yaml @@ -1,20 +1,21 @@ assets: assets +temp_dir: temp blur_size: 21 center_crop: - 224 - 224 class_filter_file: false -clickme_data: /cifs/data/tserre_lrs/projects/projects/prj_video_imagenet/human_clickme_data_processing/clickme_datasets/val_combined_07_09_2025.npz +clickme_data: /cifs/data/tserre_lrs/projects/projects/prj_video_imagenet/human_clickme_data_processing/clickme_datasets/val_combined_08_27_2025.npz correlation_batch_size: 1024 debug: false display_image_keys: auto -example_image_output_dir: jay_imagenet_val_combined_07_09_2025_images -experiment_name: jay_imagenet_val_07_09_2025 -file_exclusion_filter: ILSVRC2012_train +example_image_output_dir: jay_imagenet_val_combined_08_27_2025_images +experiment_name: jay_imagenet_val_08_27_2025 +file_exclusion_filter: null file_inclusion_filter: ILSVRC2012_val filter_mobile: true gpu_batch_size: 4096 -image_path: /cifs/data/tserre_lrs/projects/projects/prj_video_imagenet/imagenet/ILSVRC/Data/CLS-LOC/val2 +image_path: /gpfs/data/shared/imagenet/ILSVRC2012/val image_shape: - 256 - 256 @@ -32,12 +33,12 @@ parallel_save: false participant_filter: false percentile_thresh: 50 preprocess_db_data: true -processed_clickme_file: jay_imagenet_val_combined_07_09_2025_processed.npz -processed_medians: jay_imagenet_val_combined_07_09_2025_medians.json +processed_clickme_file: jay_imagenet_val_combined_08_27_2025_processed.npz +processed_medians: jay_imagenet_val_combined_08_27_2025_medians.json remove_string: imagenet/val/ time_based_bins: true multi_thresh_gpu: multi_thresh_gpu output_format: "hdf5" -processed_clickmap_bins: jay_imagenet_val_combined_07_09_2025_clickmap_bins.npy +processed_clickmap_bins: jay_imagenet_val_combined_08_27_2025_clickmap_bins.npy chunk_size: 100000 -batch_size: 14000 \ No newline at end of file +batch_size: 14000 diff --git a/configs/imgnet_configs/imagenet_val_spearman_oscar.yaml b/configs/imgnet_configs/imagenet_val_spearman_oscar.yaml new file 
mode 100644 index 0000000..3fc4dc2 --- /dev/null +++ b/configs/imgnet_configs/imagenet_val_spearman_oscar.yaml @@ -0,0 +1,44 @@ +assets: assets +temp_dir: temp/imgnet_val.h5 +blur_size: 21 +center_crop: +- 224 +- 224 +class_filter_file: false +clickme_data: /cifs/data/tserre_lrs/projects/projects/prj_video_imagenet/human_clickme_data_processing/clickme_datasets/val_combined_08_27_2025.npz +correlation_batch_size: 4096 +debug: false +display_image_keys: auto +example_image_output_dir: jay_imagenet_val_combined_08_27_2025_images +experiment_name: jay_imagenet_val_08_27_2025 +file_exclusion_filter: null +file_inclusion_filter: ILSVRC2012_val +filter_mobile: true +gpu_batch_size: 4096 +image_path: /gpfs/data/shared/imagenet/ILSVRC2012/val +image_shape: +- 256 +- 256 +mask_dir: null +mask_threshold: 0 +max_clicks: 1000000 +metadata_file: image_metadata/jay_imagenet_val_04_30_2025_dimensions.npy +metric: spearman +min_clicks: 1 +min_subjects: 5 +n_jobs: -1 +null_iterations: 50 +parallel_prepare_maps: true +parallel_save: false +participant_filter: false +percentile_thresh: 50 +preprocess_db_data: true +processed_clickme_file: jay_imagenet_val_combined_08_27_2025_processed.npz +processed_medians: jay_imagenet_val_combined_08_27_2025_medians.json +remove_string: imagenet/val/ +time_based_bins: true +multi_thresh_gpu: multi_thresh_gpu +output_format: "hdf5" +processed_clickmap_bins: jay_imagenet_val_combined_08_27_2025_clickmap_bins.npy +chunk_size: 100000 +batch_size: 14000 \ No newline at end of file diff --git a/configs/jay_imagenet_for_co3d_train_0.1.yaml b/configs/imgnet_configs/jay_imagenet_for_co3d_train_0.1.yaml similarity index 91% rename from configs/jay_imagenet_for_co3d_train_0.1.yaml rename to configs/imgnet_configs/jay_imagenet_for_co3d_train_0.1.yaml index 333feaf..fca5283 100644 --- a/configs/jay_imagenet_for_co3d_train_0.1.yaml +++ b/configs/imgnet_configs/jay_imagenet_for_co3d_train_0.1.yaml @@ -14,7 +14,7 @@ file_exclusion_filter: false file_inclusion_filter: ILSVRC2012_val filter_mobile: true gpu_batch_size: 1024 -image_path: /media/data_cifs/projects/prj_video_imagenet/imagenet/ILSVRC/Data/CLS-LOC/val2 +image_path: /gpfs/data/shared/imagenet/ILSVRC2012/val image_shape: - 256 - 256 diff --git a/configs/jay_imagenet_for_co3d_val_0.1.yaml b/configs/imgnet_configs/jay_imagenet_for_co3d_val_0.1.yaml similarity index 91% rename from configs/jay_imagenet_for_co3d_val_0.1.yaml rename to configs/imgnet_configs/jay_imagenet_for_co3d_val_0.1.yaml index 547433b..3ff3390 100644 --- a/configs/jay_imagenet_for_co3d_val_0.1.yaml +++ b/configs/imgnet_configs/jay_imagenet_for_co3d_val_0.1.yaml @@ -14,7 +14,7 @@ file_exclusion_filter: false file_inclusion_filter: ILSVRC2012_val filter_mobile: true gpu_batch_size: 4096 -image_path: /media/data_cifs/projects/prj_video_imagenet/imagenet/ILSVRC/Data/CLS-LOC/val2 +image_path: /gpfs/data/shared/imagenet/ILSVRC2012/val image_shape: - 256 - 256 diff --git a/configs/jay_imagenet_co3d_val_04_02_2025.yaml b/configs/jay_imagenet_co3d_val_04_02_2025.yaml deleted file mode 100644 index 1d5ca78..0000000 --- a/configs/jay_imagenet_co3d_val_04_02_2025.yaml +++ /dev/null @@ -1,39 +0,0 @@ -assets: assets -blur_size: 21 -center_crop: -- 256 -- 256 -class_filter_file: category_maps/synset_to_co3d.npy -clickme_data: clickme_datasets/val_combined_04_02_2025.npz -correlation_batch_size: 1024 -debug: false -display_image_keys: auto -example_image_output_dir: jay_imagenet_co3d_val_04_02_2025_images -experiment_name: jay_imagenet_co3d_val_04_02_2025 -file_exclusion_filter: 
ILSVRC2012_train -file_inclusion_filter: ILSVRC2012_val -filter_mobile: true -gpu_batch_size: 16384 -image_path: /media/data_cifs/projects/prj_video_imagenet/imagenet/ILSVRC/Data/CLS-LOC/val2 -image_shape: -- 256 -- 256 -mask_dir: false -mask_threshold: 1 -max_clicks: 75 -metadata_file: image_metadata/jay_imagenet_val_0.1_dimensions.npy -metric: spearman -min_clicks: 1 -min_subjects: 6 -n_jobs: -1 -null_iterations: 10 -parallel_prepare_maps: true -parallel_save: false -participant_filter: false -percentile_thresh: 50 -preprocess_db_data: true -processed_clickme_file: jay_imagenet_co3d_val_04_02_2025_processed.npz -processed_clickmap_bins: jay_imagenet_co3d_val_04_02_2025_clickmap_bins.npy -processed_medians: jay_imagenet_co3d_val_04_02_2025_medians.json -remove_string: imagenet/val/ -output_format: "numpy" diff --git a/configs/jay_imagenet_co3d_val_04_30_2025.yaml b/configs/jay_imagenet_co3d_val_04_30_2025.yaml deleted file mode 100644 index c8c9688..0000000 --- a/configs/jay_imagenet_co3d_val_04_30_2025.yaml +++ /dev/null @@ -1,39 +0,0 @@ -assets: assets -blur_size: 21 -center_crop: -- 224 -- 224 -class_filter_file: category_maps/synset_to_co3d.npy -clickme_data: clickme_datasets/val_combined_04_30_2025.npz -correlation_batch_size: 1024 -debug: false -display_image_keys: auto -example_image_output_dir: jay_imagenet_co3d_val_04_30_2025_images -experiment_name: jay_imagenet_co3d_val_04_30_2025 -file_exclusion_filter: ILSVRC2012_train -file_inclusion_filter: ILSVRC2012_val -filter_mobile: true -gpu_batch_size: 16384 -image_path: /media/data_cifs/projects/prj_video_imagenet/imagenet/ILSVRC/Data/CLS-LOC/val2 -image_shape: -- 256 -- 256 -mask_dir: false -mask_threshold: 1 -max_clicks: 75 -metadata_file: image_metadata/jay_imagenet_val_04_30_2025_dimensions.npy -metric: spearman -min_clicks: 1 -min_subjects: 6 -n_jobs: -1 -null_iterations: 10 -parallel_prepare_maps: true -parallel_save: false -participant_filter: false -percentile_thresh: 50 -preprocess_db_data: true -processed_clickme_file: jay_imagenet_co3d_val_04_30_2025_processed.npz -processed_clickmap_bins: jay_imagenet_co3d_val_04_30_2025_clickmap_bins.npy -processed_medians: jay_imagenet_co3d_val_04_30_2025_medians.json -remove_string: imagenet/val/ -output_format: "numpy" diff --git a/configs/jay_imagenet_train_0.1.yaml b/configs/jay_imagenet_train_0.1.yaml deleted file mode 100644 index 80d168b..0000000 --- a/configs/jay_imagenet_train_0.1.yaml +++ /dev/null @@ -1,36 +0,0 @@ -assets: assets -blur_size: 21 -center_crop: -- 224 -- 224 -class_filter_file: false -clickme_data: clickme_datasets/train_imagenet_10_17_2024.npz -correlation_batch_size: 1024 -debug: false -display_image_keys: auto -example_image_output_dir: jay_imagenet_train_0.1_images -experiment_name: jay_imagenet_train_0.1 -file_exclusion_filter: false -file_inclusion_filter: false -filter_mobile: true -gpu_batch_size: 1024 -image_path: /media/data_cifs/projects/prj_video_imagenet/imagenet/ILSVRC/Data/CLS-LOC/train -image_shape: -- 256 -- 256 -mask_dir: false -mask_threshold: 1 -max_clicks: 100000 -metadata_file: image_metadata/jay_imagenet_train_0.1_dimensions.npy -metric: spearman -min_clicks: 10 -min_subjects: 3 -n_jobs: -1 -null_iterations: 10 -parallel_prepare_maps: true -participant_filter: false -percentile_thresh: 50 -preprocess_db_data: true -processed_clickme_file: jay_imagenet_train_0.1_processed.npz -processed_medians: jay_imagenet_train_0.1_medians.json -remove_string: imagenet/train/ diff --git a/configs/jay_imagenet_train_02_19_2025.yaml 
b/configs/jay_imagenet_train_02_19_2025.yaml deleted file mode 100644 index 17208b6..0000000 --- a/configs/jay_imagenet_train_02_19_2025.yaml +++ /dev/null @@ -1,37 +0,0 @@ -assets: assets -blur_size: 21 -center_crop: -- 256 -- 256 -class_filter_file: false -clickme_data: db_dumps/train_combined_02_26_2025.npz -correlation_batch_size: 1024 -debug: false -display_image_keys: auto -example_image_output_dir: jay_imagenet_train_combined_02_19_2025_images -experiment_name: jay_imagenet_train_02_19_2025 -file_exclusion_filter: false -file_inclusion_filter: false -filter_mobile: true -gpu_batch_size: 1024 -image_path: /media/data_cifs/projects/prj_video_imagenet/imagenet/ILSVRC/Data/CLS-LOC/train -image_shape: -- 256 -- 256 -mask_dir: false -mask_threshold: 1 -max_clicks: 10000 -metadata_file: image_metadata/jay_imagenet_train_0.1_dimensions.npy -metric: spearman -min_clicks: 3 -min_subjects: 3 -n_jobs: -1 -null_iterations: 10 -parallel_prepare_maps: true -parallel_save: true -participant_filter: false -percentile_thresh: 50 -preprocess_db_data: true -processed_clickme_file: jay_imagenet_train_combined_02_19_2025_processed.npz -processed_medians: jay_imagenet_train_combined_02_19_2025_medians.json -remove_string: imagenet/train/ diff --git a/configs/jay_imagenet_train_04_02_2025.yaml b/configs/jay_imagenet_train_04_02_2025.yaml deleted file mode 100644 index bca5d3d..0000000 --- a/configs/jay_imagenet_train_04_02_2025.yaml +++ /dev/null @@ -1,43 +0,0 @@ -assets: assets -blur_size: 21 -center_crop: -- 256 -- 256 -class_filter_file: false -clickme_data: clickme_datasets/train_combined_04_02_2025.npz -correlation_batch_size: 4096 -debug: false -display_image_keys: auto -example_image_output_dir: jay_imagenet_train_combined_04_02_2025_images -experiment_name: jay_imagenet_train_04_02_2025 -file_exclusion_filter: ILSVRC2012_val -file_inclusion_filter: null -filter_mobile: true -gpu_batch_size: 10000 -image_path: /media/data_cifs/projects/prj_video_imagenet/imagenet/ILSVRC/Data/CLS-LOC/train -image_shape: -- 256 -- 256 -mask_dir: null -mask_threshold: 1 -max_clicks: 10000 -metadata_file: image_metadata/jay_imagenet_train_0.1_dimensions.npy -metric: spearman -min_clicks: 1 -min_subjects: 3 -n_jobs: -1 -null_iterations: 10 -parallel_prepare_maps: true -parallel_save: false -participant_filter: false -percentile_thresh: 50 -preprocess_db_data: true -processed_clickme_file: jay_imagenet_train_combined_04_02_2025_processed.npz -processed_clickmap_bins: jay_imagenet_train_combined_04_02_2025_clickmap_bins.npy -processed_medians: jay_imagenet_train_combined_04_02_2025_medians.json -remove_string: imagenet/train/ -output_format: "hdf5" -use_cython: true -chunk_size: 100000 -batch_size: 50000 -multi_thresh_gpu: true diff --git a/configs/jay_imagenet_train_04_23_2025.yaml b/configs/jay_imagenet_train_04_23_2025.yaml deleted file mode 100644 index b6c50a6..0000000 --- a/configs/jay_imagenet_train_04_23_2025.yaml +++ /dev/null @@ -1,41 +0,0 @@ -assets: assets -blur_size: 21 -center_crop: -- 256 -- 256 -class_filter_file: false -clickme_data: clickme_datasets/train_combined_04_23_2025.npz -correlation_batch_size: 4000 -debug: false -display_image_keys: auto -example_image_output_dir: jay_imagenet_train_combined_04_23_2025_images -experiment_name: jay_imagenet_train_04_23_2025 -file_exclusion_filter: ILSVRC2012_val -file_inclusion_filter: null -filter_mobile: true -gpu_batch_size: 5000 -image_path: /media/data_cifs/projects/prj_video_imagenet/imagenet/ILSVRC/Data/CLS-LOC/train -image_shape: -- 256 -- 256 -mask_dir: 
null -mask_threshold: 1 -max_clicks: 10000 -metadata_file: image_metadata/jay_imagenet_train_0.1_dimensions.npy -metric: spearman -min_clicks: 3 -min_subjects: 3 -n_jobs: -1 -null_iterations: 10 -parallel_prepare_maps: true -parallel_save: false -participant_filter: false -percentile_thresh: 50 -preprocess_db_data: true -processed_clickme_file: jay_imagenet_train_combined_04_23_2025_processed.npz -processed_medians: jay_imagenet_train_combined_04_23_2025_medians.json -remove_string: imagenet/train/ -output_format: "hdf5" -use_cython: true -chunk_size: 100000 -batch_size: 14000 diff --git a/configs/jay_imagenet_train_04_30_2025.yaml b/configs/jay_imagenet_train_04_30_2025.yaml deleted file mode 100644 index 7a6ec84..0000000 --- a/configs/jay_imagenet_train_04_30_2025.yaml +++ /dev/null @@ -1,41 +0,0 @@ -assets: assets -blur_size: 21 -center_crop: -- 224 -- 224 -class_filter_file: false -clickme_data: clickme_datasets/train_combined_04_30_2025.npz -correlation_batch_size: 4000 -debug: false -display_image_keys: auto -example_image_output_dir: jay_imagenet_train_combined_04_30_2025_images -experiment_name: jay_imagenet_train_04_30_2025 -file_exclusion_filter: ILSVRC2012_val -file_inclusion_filter: null -filter_mobile: true -gpu_batch_size: 5000 -image_path: /media/data_cifs/projects/prj_video_imagenet/imagenet/ILSVRC/Data/CLS-LOC/train -image_shape: -- 256 -- 256 -mask_dir: null -mask_threshold: 1 -max_clicks: 10000 -metadata_file: image_metadata/jay_imagenet_train_04_30_2025_dimensions.npy -metric: spearman -min_clicks: 3 -min_subjects: 3 -n_jobs: -1 -null_iterations: 10 -parallel_prepare_maps: true -parallel_save: false -participant_filter: false -percentile_thresh: 50 -preprocess_db_data: true -processed_clickme_file: jay_imagenet_train_combined_04_30_2025_processed.npz -processed_medians: jay_imagenet_train_combined_04_30_2025_medians.json -remove_string: imagenet/train/ -output_format: "hdf5" -use_cython: true -chunk_size: 100000 -batch_size: 14000 diff --git a/configs/jay_imagenet_train_12_18_2024.yaml b/configs/jay_imagenet_train_12_18_2024.yaml deleted file mode 100644 index defb25c..0000000 --- a/configs/jay_imagenet_train_12_18_2024.yaml +++ /dev/null @@ -1,37 +0,0 @@ -assets: jay_work_in_progress -blur_size: 21 -center_crop: -- 256 -- 256 -class_filter_file: false -clickme_data: clickme_datasets/jay_imagenet_train_combined_12_18_2024.npz -correlation_batch_size: 4096 -debug: false -display_image_keys: auto -example_image_output_dir: jay_imagenet_train_combined_12_18_2024_images -experiment_name: jay_imagenet_train_12_18_2024 -file_exclusion_filter: false -file_inclusion_filter: false -filter_mobile: true -gpu_batch_size: 4096 -image_path: /media/data_cifs/projects/prj_video_imagenet/imagenet/ILSVRC/Data/CLS-LOC/train -image_shape: -- 256 -- 256 -mask_dir: false -mask_threshold: 1 -max_clicks: 75 -metadata_file: image_metadata/jay_imagenet_train_0.1_dimensions.npy -metric: spearman -min_clicks: 10 -min_subjects: 3 -n_jobs: -1 -null_iterations: 10 -parallel_prepare_maps: true -parallel_save: true -participant_filter: false -percentile_thresh: 50 -preprocess_db_data: true -processed_clickme_file: jay_imagenet_train_combined_12_18_2024_processed.npz -processed_medians: jay_imagenet_train_combined_12_18_2024_medians.json -remove_string: imagenet/train/ diff --git a/configs/jay_imagenet_val_0.1.yaml b/configs/jay_imagenet_val_0.1.yaml deleted file mode 100644 index c389888..0000000 --- a/configs/jay_imagenet_val_0.1.yaml +++ /dev/null @@ -1,37 +0,0 @@ -assets: assets -blur_size: 21 
-center_crop: -- 224 -- 224 -class_filter_file: false -clickme_data: clickme_datasets/val_imagenet_10_17_2024.npz -correlation_batch_size: 1024 -debug: false -display_image_keys: auto -example_image_output_dir: jay_imagenet_val_0.1_images -experiment_name: jay_imagenet_val_0.1 -file_exclusion_filter: false -file_inclusion_filter: ILSVRC2012_val -filter_mobile: true -gpu_batch_size: 1024 -image_path: /media/data_cifs/projects/prj_video_imagenet/imagenet/ILSVRC/Data/CLS-LOC/val2 -image_shape: -- 256 -- 256 -mask_dir: false -mask_threshold: 1 -max_clicks: 75 -metadata_file: image_metadata/jay_imagenet_val_0.1_dimensions.npy -metric: spearman -min_clicks: 10 -min_subjects: 5 -n_jobs: -1 -null_iterations: 10 -parallel_prepare_maps: true -parallel_save: true -participant_filter: false -percentile_thresh: 50 -preprocess_db_data: true -processed_clickme_file: jay_imagenet_val_0.1_processed.npz -processed_medians: jay_imagenet_val_0.1_medians.json -remove_string: imagenet/val/ diff --git a/configs/jay_imagenet_val_02_19_2025.yaml b/configs/jay_imagenet_val_02_19_2025.yaml deleted file mode 100644 index 77b4972..0000000 --- a/configs/jay_imagenet_val_02_19_2025.yaml +++ /dev/null @@ -1,37 +0,0 @@ -assets: assets -blur_size: 21 -center_crop: -- 256 -- 256 -class_filter_file: false -clickme_data: clickme_datasets/val_combined_04_02_2025.npz -correlation_batch_size: 1024 -debug: false -display_image_keys: auto -example_image_output_dir: jay_imagenet_val_combined_04_02_2025_images -experiment_name: jay_imagenet_val_04_02_2025 -file_exclusion_filter: ILSVRC2012_train -file_inclusion_filter: ILSVRC2012_val -filter_mobile: true -gpu_batch_size: 4096 -image_path: /media/data_cifs/projects/prj_video_imagenet/imagenet/ILSVRC/Data/CLS-LOC/val2 -image_shape: -- 256 -- 256 -mask_dir: false -mask_threshold: 1 -max_clicks: 75 -metadata_file: image_metadata/jay_imagenet_val_0.1_dimensions.npy -metric: spearman -min_clicks: 10 -min_subjects: 5 -n_jobs: -1 -null_iterations: 10 -parallel_prepare_maps: true -parallel_save: true -participant_filter: false -percentile_thresh: 50 -preprocess_db_data: true -processed_clickme_file: jay_imagenet_val_combined_02_19_2025_processed.npz -processed_medians: jay_imagenet_val_combined_02_19_2025_medians.json -remove_string: imagenet/val/ diff --git a/configs/jay_imagenet_val_04_02_2025.yaml b/configs/jay_imagenet_val_04_02_2025.yaml deleted file mode 100644 index 6ce3c7b..0000000 --- a/configs/jay_imagenet_val_04_02_2025.yaml +++ /dev/null @@ -1,39 +0,0 @@ -assets: assets -blur_size: 21 -center_crop: -- 256 -- 256 -class_filter_file: false -clickme_data: clickme_datasets/val_combined_04_02_2025.npz -correlation_batch_size: 8 -debug: false -display_image_keys: auto -example_image_output_dir: jay_imagenet_val_combined_04_02_2025_images -experiment_name: jay_imagenet_val_04_02_2025 -file_exclusion_filter: ILSVRC2012_train -file_inclusion_filter: ILSVRC2012_val -filter_mobile: true -gpu_batch_size: 16384 -image_path: /media/data_cifs/projects/prj_video_imagenet/imagenet/ILSVRC/Data/CLS-LOC/val2 -image_shape: -- 256 -- 256 -mask_dir: null -mask_threshold: 0. 
-max_clicks: 100000 -metadata_file: image_metadata/jay_imagenet_val_0.1_dimensions.npy -metric: rankorder -min_clicks: 1 -min_subjects: 6 -n_jobs: -1 -null_iterations: 20 -parallel_prepare_maps: true -parallel_save: false -participant_filter: false -percentile_thresh: 50 -preprocess_db_data: true -processed_clickme_file: jay_imagenet_val_combined_04_02_2025_processed.npz -processed_clickmap_bins: jay_imagenet_val_combined_04_02_2025_clickmap_bins.npy -processed_medians: jay_imagenet_val_combined_04_02_2025_medians.json -remove_string: imagenet/val/ -multi_thresh_gpu: true diff --git a/configs/jay_imagenet_val_04_23_2025.yaml b/configs/jay_imagenet_val_04_23_2025.yaml deleted file mode 100644 index 43e37a3..0000000 --- a/configs/jay_imagenet_val_04_23_2025.yaml +++ /dev/null @@ -1,37 +0,0 @@ -assets: assets -blur_size: 21 -center_crop: -- 256 -- 256 -class_filter_file: false -clickme_data: clickme_datasets/val_combined_04_23_2025.npz -correlation_batch_size: 1024 -debug: false -display_image_keys: auto -example_image_output_dir: jay_imagenet_val_combined_04_23_2025_images -experiment_name: jay_imagenet_val_04_23_2025 -file_exclusion_filter: ILSVRC2012_train -file_inclusion_filter: ILSVRC2012_val -filter_mobile: true -gpu_batch_size: 16384 -image_path: /media/data_cifs/projects/prj_video_imagenet/imagenet/ILSVRC/Data/CLS-LOC/val2 -image_shape: -- 256 -- 256 -mask_dir: false -mask_threshold: 1 -max_clicks: 75 -metadata_file: image_metadata/jay_imagenet_val_0.1_dimensions.npy -metric: spearman -min_clicks: 10 -min_subjects: 5 -n_jobs: -1 -null_iterations: 10 -parallel_prepare_maps: true -parallel_save: false -participant_filter: false -percentile_thresh: 50 -preprocess_db_data: true -processed_clickme_file: jay_imagenet_val_combined_04_23_2025_processed.npz -processed_medians: jay_imagenet_val_combined_04_23_2025_medians.json -remove_string: imagenet/val/ diff --git a/configs/jay_imagenet_val_04_30_2025.yaml b/configs/jay_imagenet_val_04_30_2025.yaml deleted file mode 100644 index b9a99cf..0000000 --- a/configs/jay_imagenet_val_04_30_2025.yaml +++ /dev/null @@ -1,37 +0,0 @@ -assets: assets -blur_size: 21 -center_crop: -- 224 -- 224 -class_filter_file: false -clickme_data: clickme_datasets/val_combined_04_30_2025.npz -correlation_batch_size: 1024 -debug: false -display_image_keys: auto -example_image_output_dir: jay_imagenet_val_combined_04_30_2025_images -experiment_name: jay_imagenet_val_04_30_2025 -file_exclusion_filter: ILSVRC2012_train -file_inclusion_filter: ILSVRC2012_val -filter_mobile: true -gpu_batch_size: 16384 -image_path: /media/data_cifs/projects/prj_video_imagenet/imagenet/ILSVRC/Data/CLS-LOC/val2 -image_shape: -- 256 -- 256 -mask_dir: false -mask_threshold: 1 -max_clicks: 75 -metadata_file: image_metadata/jay_imagenet_val_04_30_2025_dimensions.npy -metric: spearman -min_clicks: 10 -min_subjects: 5 -n_jobs: -1 -null_iterations: 10 -parallel_prepare_maps: true -parallel_save: false -participant_filter: false -percentile_thresh: 50 -preprocess_db_data: true -processed_clickme_file: jay_imagenet_val_combined_04_30_2025_processed.npz -processed_medians: jay_imagenet_val_combined_04_30_2025_medians.json -remove_string: imagenet/val/ diff --git a/configs/jay_imagenet_val_12_18_2024.yaml b/configs/jay_imagenet_val_12_18_2024.yaml deleted file mode 100644 index 41751cb..0000000 --- a/configs/jay_imagenet_val_12_18_2024.yaml +++ /dev/null @@ -1,36 +0,0 @@ -assets: jay_work_in_progress -blur_size: 21 -center_crop: -- 256 -- 256 -class_filter_file: false -clickme_data: 
clickme_datasets/jay_imagenet_val_combined_12_18_2024.npz -correlation_batch_size: 1024 -debug: false -display_image_keys: auto -example_image_output_dir: jay_imagenet_val_combined_12_18_2024_images -experiment_name: jay_imagenet_val_12_18_2024 -file_exclusion_filter: false -file_inclusion_filter: ILSVRC2012_val -filter_mobile: true -gpu_batch_size: 1024 -image_path: /media/data_cifs/projects/prj_video_imagenet/imagenet/ILSVRC/Data/CLS-LOC/val2 -image_shape: -- 256 -- 256 -mask_dir: null -mask_threshold: 1 -max_clicks: 75 -metadata_file: image_metadata/jay_imagenet_val_0.1_dimensions.npy -metric: spearman -min_clicks: 10 -min_subjects: 5 -n_jobs: -1 -null_iterations: 10 -parallel_prepare_maps: true -participant_filter: false -percentile_thresh: 50 -preprocess_db_data: true -processed_clickme_file: jay_imagenet_val_combined_12_18_2024_processed.npz -processed_medians: jay_imagenet_val_combined_12_18_2024_medians.json -remove_string: imagenet/val/ diff --git a/prepare_clickmaps.py b/prepare_clickmaps.py index c7921bc..f4e8fe8 100644 --- a/prepare_clickmaps.py +++ b/prepare_clickmaps.py @@ -47,6 +47,9 @@ if "time_based_bins" not in config: config["time_based_bins"] = args.time_based_bins + if "max_subjects" not in config: + config["max_subjects"] = float('inf') + # Load clickme data print(f"Loading clickme data...") clickme_data = utils.process_clickme_data( diff --git a/sample_clickmaps.py b/sample_clickmaps.py new file mode 100644 index 0000000..dab36ad --- /dev/null +++ b/sample_clickmaps.py @@ -0,0 +1,57 @@ +import os +import numpy as np +from src.utils import process_clickme_data +from tqdm import tqdm +# Sample clickmaps that have more than 30 subjects while maintaining class distribution +if __name__ == "__main__": + clickme_data_file = "/cifs/data/tserre_lrs/projects/projects/prj_video_imagenet/human_clickme_data_processing/clickme_datasets/val_combined_08_27_2025.npz" + clickme_data = process_clickme_data(clickme_data_file, True) + total_maps = len(clickme_data) + total_numbers = {} + for _, row in tqdm(clickme_data.iterrows(), total=len(clickme_data), desc="Processing clickmaps"): + image_path = row['image_path'] + image_file_name = os.path.sep.join(row['image_path'].split(os.path.sep)[-2:]) + cls_name = image_file_name.split('/')[0] + if cls_name not in total_numbers: + total_numbers[cls_name] = {} + if "ILSVRC2012_val" not in image_file_name: + continue + if image_path not in total_numbers[cls_name]: + total_numbers[cls_name][image_path] = 1 + else: + total_numbers[cls_name][image_path] += 1 + sampled_img_paths = {} + total_counts = [] + for cls_name, image_paths in total_numbers.items(): + numbers = [] + sampled_img_paths[cls_name] = [] + sampled_names = [] + for img_path, number in image_paths.items(): + numbers.append(number) + if number > 20: + sampled_names.append(img_path) + sampled_img_paths[cls_name].append(img_path) + numbers = np.array(numbers) + larger_than = np.sum(numbers>20) + if larger_than > 0: + total_counts.append(larger_than) + + for cls_name, img_paths in sampled_img_paths.items(): + sampled_img_paths[cls_name] = img_paths[:5] + sampled_clickme_data = clickme_data.copy() + + allowed_files = { + f"{img_path}" for _, img_paths in sampled_img_paths.items() for img_path in img_paths + } + print(allowed_files) + print(sampled_clickme_data) + print(len(sampled_clickme_data)) + # Keep only rows whose file name is allowed + sampled_clickme_data = sampled_clickme_data[ + sampled_clickme_data["image_path"].isin(allowed_files) + ] + print(len(sampled_clickme_data)) + 
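+    # At this point sampled_img_paths keeps at most 5 validation images per class,
+    # each with more than 20 clickmaps. The matching rows are written below to
+    # clickme_datasets/sampled_imgnet_val.csv, the file the balance_exp_configs
+    # reference through their clickme_data field.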
sampled_clickme_data.to_csv(os.path.join('clickme_datasets', 'sampled_imgnet_val.csv')) + + + \ No newline at end of file diff --git a/scripts/process_imgnet.sh b/scripts/process_imgnet.sh new file mode 100644 index 0000000..a451e71 --- /dev/null +++ b/scripts/process_imgnet.sh @@ -0,0 +1,2 @@ +python prepare_clickmaps.py configs/imgnet_configs/imagenet_train_oscar.yaml +python prepare_clickmaps.py configs/imgnet_configs/imagenet_val_oscar.yaml diff --git a/scripts/run_exp.sh b/scripts/run_exp.sh new file mode 100644 index 0000000..379c33e --- /dev/null +++ b/scripts/run_exp.sh @@ -0,0 +1,11 @@ +# python ceiling_floor_estimate.py configs/exp_configs/imagenet_val_oscar_max_5.yaml +# python ceiling_floor_estimate.py configs/exp_configs/imagenet_val_oscar_max_10.yaml +#python ceiling_floor_estimate.py configs/exp_configs/imagenet_val_oscar_max_15.yaml +# python ceiling_floor_estimate.py configs/exp_configs/imagenet_val_oscar_max_20.yaml +python ceiling_floor_estimate.py configs/exp_configs/imagenet_val_oscar_max_25.yaml +# python ceiling_floor_estimate.py configs/exp_configs/imagenet_val_oscar_max_30.yaml + +python ceiling_floor_estimate.py configs/balance_exp_configs/imagenet_val_oscar_max_5.yaml +python ceiling_floor_estimate.py configs/balance_exp_configs/imagenet_val_oscar_max_10.yaml +python ceiling_floor_estimate.py configs/balance_exp_configs/imagenet_val_oscar_max_15.yaml +python ceiling_floor_estimate.py configs/balance_exp_configs/imagenet_val_oscar_max_20.yaml \ No newline at end of file diff --git a/src/utils.py b/src/utils.py index b54572b..b34df17 100644 --- a/src/utils.py +++ b/src/utils.py @@ -12,11 +12,12 @@ from train_subject_classifier import RNN from accelerate import Accelerator from joblib import Parallel, delayed +import joblib import psutil +import h5py from PIL import Image from scipy.ndimage import maximum_filter - # Near the top of the file (around line 10), add torch.cuda memory management functions try: import torch.cuda @@ -376,10 +377,15 @@ def process_single_row(row): return (image_file_name, tuples_list) # Process rows in parallel - results = Parallel(n_jobs=1)( - delayed(process_single_row)(row) - for _, row in tqdm(clickme_data.iterrows(), total=len(clickme_data), desc="Processing clickmaps") - ) + # results = Parallel(n_jobs=1)( + # delayed(process_single_row)(row) + # for _, row in tqdm(clickme_data.iterrows(), total=len(clickme_data), desc="Processing clickmaps") + # ) + + results = [] + for _, row in tqdm(clickme_data.iterrows(), total=len(clickme_data), desc="Processing clickmaps"): + single_row_result = process_single_row(row) + results.append(single_row_result) # Combine results proc_clickmaps = {} @@ -1662,19 +1668,22 @@ def process_all_maps_multi_thresh_gpu( thresholds=10, return_before_blur=False, time_based_bins=False, + save_to_disk=False, + maximum_length=5000, ): """ Simplified function to blur clickmaps on GPU in batches with adaptive kernel sizing """ - import torch - from tqdm import tqdm - import numpy as np - + if save_to_disk: + assert return_before_blur + temp_file = h5py.File(config['temp_dir'], 'w') + temp_group = temp_file.create_group("clickmaps") # Extract basic parameters blur_size = config["blur_size"] blur_sigma = config.get("blur_sigma", blur_size) image_shape = config["image_shape"] min_subjects = config["min_subjects"] + max_subjects = config["max_subjects"] min_clicks = config["min_clicks"] max_kernel_size = config.get("max_kernel_size", 51) blur_sigma_function = config.get("blur_sigma_function", lambda x: x) @@ -1697,11 
+1706,13 @@ def process_all_maps_multi_thresh_gpu( final_clickmaps = {} clickmap_bins = {} click_counts = {} # Track click counts for each image - + total_maps = 0 + if save_to_disk: + save_count = 0 # Preprocess all clickmaps first to binary maps - for key, trials in clickmaps.items(): + for clickmap_idx, (key, trials) in tqdm(enumerate(clickmaps.items()), "Pre-processing on CPU"): if len(trials) < min_subjects: - print("Not enough subjects", key, len(trials)) + # print("Not enough subjects", key, len(trials)) continue if time_based_bins: lens = [len(x) for x in trials] @@ -1709,9 +1720,6 @@ def process_all_maps_multi_thresh_gpu( for trial in trials: max_count = len(trial) half_count = int(max_count/2) - #below_mean = np.linspace(max(half_count * .1, min_clicks), half_count, thresholds //2).astype(int) - #above_mean = np.linspace(half_count+1, max_count + 1, thresholds // 2).astype(int) - #trial_bin = np.concatenate([below_mean, above_mean]) trial_bin = np.linspace(max(half_count * .1, min_clicks), max_count, thresholds).astype(int) bins.append(trial_bin) bin_clickmaps = [] @@ -1799,11 +1807,20 @@ def process_all_maps_multi_thresh_gpu( click_counts[key] = len(trials) # Store total clicks for this image clickmap_bins[key] = np.asarray(bin_counts) # Add to all_clickmaps with the appropriate method - if return_before_blur: + if save_to_disk: + temp_group.create_dataset(f"clickmap_{str(clickmap_idx).zfill(8)}", data=np.stack(bin_clickmaps, axis=0)) + elif return_before_blur: + bin_clickmaps = np.stack(bin_clickmaps, axis=0) + if bin_clickmaps.shape[1] < min_subjects: + continue + bin_clickmaps = bin_clickmaps[:, :max_subjects, :, :] all_clickmaps.append(np.stack(bin_clickmaps, axis=0)) else: all_clickmaps.append(np.concatenate(bin_clickmaps, axis=0)) - if not all_clickmaps: + if save_to_disk: + temp_file.close() + + if not save_to_disk and not all_clickmaps: print("No valid clickmaps to process") return {}, [], [], [], {} @@ -1921,7 +1938,6 @@ def process_all_maps_multi_thresh_gpu( torch.cuda.empty_cache() return final_clickmaps, all_clickmaps, categories, keep_index, click_counts, clickmap_bins - def blur_maps_for_cf(all_clickmaps, blur_size, blur_sigma, gpu_batch_size, native_size=None): # Step 2: Prepare for batch blurring on GPU total_maps = len(all_clickmaps) @@ -2021,7 +2037,7 @@ def blur_maps_for_cf(all_clickmaps, blur_size, blur_sigma, gpu_batch_size, nativ torch.cuda.empty_cache() return all_clickmaps -def sparse_scale(img, scale, device='cpu'): +def sparse_scale(img, scale, device='cpu', pad=True): if isinstance(img, np.ndarray): img = torch.tensor(img).to(device) input_shape = img.shape @@ -2054,15 +2070,17 @@ def sparse_scale(img, scale, device='cpu'): count_img[count_img==0] = 1 scaled_img = scaled_img/count_img - - pad_h = (org_h - new_h) // 2 - pad_w = (org_w - new_w) // 2 - diff_h = org_h - new_h - pad_h*2 - diff_w = org_w - new_w - pad_w*2 - padded_img = F.pad(scaled_img, (pad_h, pad_h+diff_h, pad_w, pad_w+diff_w)) - padded_img = padded_img.reshape(input_shape) - return padded_img - + if pad: + pad_h = (org_h - new_h) // 2 + pad_w = (org_w - new_w) // 2 + diff_h = org_h - new_h - pad_h*2 + diff_w = org_w - new_w - pad_w*2 + padded_img = F.pad(scaled_img, (pad_h, pad_h+diff_h, pad_w, pad_w+diff_w)) + padded_img = padded_img.reshape(input_shape) + return padded_img + else: + return scaled_img + def scale_img(img, scale, device='cpu'): if isinstance(img, np.ndarray): img = torch.tensor(img) @@ -2101,17 +2119,19 @@ def to_torch(x, device, dtype): return x.to(device, dtype=dtype) 
return torch.as_tensor(x, device=device, dtype=dtype) -def project_img_gpu(img, depth, w2c_s, w2c_t, K_s, K_t, device): - was_tensor = isinstance(img, torch.Tensor) - org_dtype = img.dtype if hasattr(img, 'dtype') else 'float32' +def project_img_gpu(img, depth, target_depth, w2c_s, w2c_t, K_s, K_t, device): + org_dtype = img.dtype if hasattr(img, 'dtype') else 'float32' + input_shape = img.shape + was_tensor = isinstance(img, torch.Tensor) + # Move everything to torch if isinstance(img, torch.Tensor): img = img else: img = torch.tensor(img).float() dtype = img.dtype - input_shape = img.shape + img = img.to(device) depth = to_torch(depth, device, dtype) # Convert to numpy to avoid lazy tensor operation in parallel @@ -2119,7 +2139,6 @@ def project_img_gpu(img, depth, w2c_s, w2c_t, K_s, K_t, device): K_s = K_s.cpu().numpy() K_s_inv = np.linalg.inv(K_s) K_s_inv = to_torch(K_s_inv, device, dtype) - # K_s = to_torch(K_s, device, dtype) K_t = to_torch(K_t, device, dtype) w2c_s = to_torch(w2c_s, device, dtype) w2c_t = to_torch(w2c_t, device, dtype) @@ -2128,10 +2147,8 @@ def project_img_gpu(img, depth, w2c_s, w2c_t, K_s, K_t, device): img = img.unsqueeze(0) # (1,H,W) elif img.ndim > 3: img = img.reshape(-1, img.shape[-2], img.shape[-1]) - C, H, W = img.shape assert depth.shape[-2:] == (H, W), "depth must match HxW of img" - R_s, T_s = w2c_s[:3, :3], w2c_s[:3, 3] R_t, T_t = w2c_t[:3, :3], w2c_t[:3, 3] @@ -2141,6 +2158,7 @@ def project_img_gpu(img, depth, w2c_s, w2c_t, K_s, K_t, device): torch.arange(W, device=device, dtype=dtype), indexing='ij' ) + pixels_h = torch.stack([xs, ys, torch.ones_like(xs)], dim=0).reshape(3, -1) # (3, N) depth_flat = depth.reshape(-1) # (N,) @@ -2164,10 +2182,11 @@ def project_img_gpu(img, depth, w2c_s, w2c_t, K_s, K_t, device): # ---- Valid mask ---- valid = ( (x_t >= 0) & (x_t < W) & - (y_t >= 0) & (y_t < H) & - (z_proj > 0) + (y_t >= 0) & (y_t < H) # & + # (z_proj > 0) ) + if not valid.any(): out = torch.zeros_like(img) if not was_tensor: @@ -2177,6 +2196,7 @@ def project_img_gpu(img, depth, w2c_s, w2c_t, K_s, K_t, device): x_t = x_t[valid] y_t = y_t[valid] z_t = z_proj[valid] + img_flat = img.view(img.shape[0], -1)[:, valid] # (C, N_valid) # ---- Z-buffer via scatter_reduce (amin) ---- @@ -2184,7 +2204,9 @@ def project_img_gpu(img, depth, w2c_s, w2c_t, K_s, K_t, device): num_pixels = H * W # Per-target-pixel min depth - z_min = torch.full((num_pixels,), float('inf'), device=device, dtype=z_t.dtype) + # z_min = torch.full((num_pixels,), float('inf'), device=device, dtype=z_t.dtype) + # Use this to mask any point that's behind the actual object + z_min = torch.tensor(target_depth*1.1).reshape(-1).to(device).to(z_t.dtype) # PyTorch >= 1.12: Tensor.scatter_reduce_ z_min = z_min.scatter_reduce(0, flat_indices, z_t, reduce='amin', include_self=True) @@ -2202,9 +2224,6 @@ def project_img_gpu(img, depth, w2c_s, w2c_t, K_s, K_t, device): # Restore original shape/dtype if not was_tensor: target = target.detach().cpu().numpy().astype(org_dtype) - else: - target = target.detach().cpu().astype(org_dtype) - target = target.reshape(input_shape) return target @@ -2230,4 +2249,5 @@ def get_rot_target(img_idx): target_img_ids.append(target_img_idx) target_img_diffs.append(i) - return target_img_ids, target_img_diffs \ No newline at end of file + return target_img_ids, target_img_diffs + From 9dc4c50a09aa1ca758adea264dd7fa691e87e2f7 Mon Sep 17 00:00:00 2001 From: PPPayson Date: Fri, 19 Sep 2025 18:03:25 -0400 Subject: [PATCH 02/16] Center crop and resize --- ceiling_floor_estimate.py 
| 3 +- ceiling_floor_estimate_large.py | 99 +++++++++++-------- .../imagenet_val_spearman_oscar.yaml | 3 +- scripts/compute_floor.sh | 10 ++ scripts/run_exp.sh | 2 +- src/utils.py | 15 +-- 6 files changed, 78 insertions(+), 54 deletions(-) create mode 100644 scripts/compute_floor.sh diff --git a/ceiling_floor_estimate.py b/ceiling_floor_estimate.py index e5c4129..4444094 100644 --- a/ceiling_floor_estimate.py +++ b/ceiling_floor_estimate.py @@ -598,7 +598,7 @@ def compute_correlation_batch(batch_indices, all_clickmaps, all_names, metric="a config["constancy"] = False if "max_subjects" not in config: - config["max_subjects"] = float('inf') + config["max_subjects"] = -1 if args.metric is not None: config["metric"] = args.metric @@ -723,7 +723,6 @@ def compute_correlation_batch(batch_indices, all_clickmaps, all_names, metric="a min_clicks=config["min_clicks"], max_clicks=config["max_clicks"], n_jobs=config["n_jobs"]) - # Apply filters if necessary if config["class_filter_file"]: print("Filtering classes...") diff --git a/ceiling_floor_estimate_large.py b/ceiling_floor_estimate_large.py index 58def14..2d66f90 100644 --- a/ceiling_floor_estimate_large.py +++ b/ceiling_floor_estimate_large.py @@ -173,7 +173,6 @@ def compute_correlation_batch(batch_indices, all_clickmaps, all_names, metric="a rand_i = np.random.randint(len(all_clickmaps) - 1) if rand_i >= i: rand_i += 1 - rand_clickmaps = all_clickmaps[rand_i] rand_name = all_names[rand_i] rand_hd5_name = f"clickmap_{str(rand_i).zfill(8)}" random_map = all_clickmaps[rand_hd5_name] @@ -320,7 +319,7 @@ def compute_correlation_batch(batch_indices, all_clickmaps, all_names, metric="a config_file = utils.get_config(sys.argv) config = utils.process_config(config_file) if "max_subjects" not in config: - config["max_subjects"] = float('inf') + config["max_subjects"] = -1 # Add filter_duplicates to config if not present if "filter_duplicates" not in config: config["filter_duplicates"] = args.filter_duplicates @@ -391,7 +390,6 @@ def compute_correlation_batch(batch_indices, all_clickmaps, all_names, metric="a image_output_dir = config["example_image_output_dir"] temp_dir = config["temp_dir"] - os.makedirs(temp_dir, exist_ok=True) os.makedirs(output_dir, exist_ok=True) os.makedirs(image_output_dir, exist_ok=True) os.makedirs(os.path.join(output_dir, config["experiment_name"]), exist_ok=True) @@ -552,26 +550,7 @@ def compute_correlation_batch(batch_indices, all_clickmaps, all_names, metric="a # n_jobs = adjusted_n_jobs # Process correlation batches in parallel - ceiling_returns = Parallel(n_jobs=n_jobs, prefer="threads")( - delayed(compute_correlation_batch)( - batch_indices=batch, - all_clickmaps=all_clickmaps, - all_names=final_keep_index, - metric=metric, - n_iterations=null_iterations, - device=device, - blur_size=config["blur_size"], - blur_sigma=config.get("blur_sigma", config["blur_size"]), - floor=False, - config=config, - metadata=metadata, - ) for batch in tqdm(batches, desc="Computing ceiling batches", total=len(batches)) - ) - ceiling_results, all_ceilings = zip(*ceiling_returns) - # Force garbage collection between major operations - gc.collect() - - # floor_returns = Parallel(n_jobs=n_jobs, prefer="threads")( + # ceiling_returns = Parallel(n_jobs=n_jobs, prefer="threads")( # delayed(compute_correlation_batch)( # batch_indices=batch, # all_clickmaps=all_clickmaps, @@ -581,27 +560,46 @@ def compute_correlation_batch(batch_indices, all_clickmaps, all_names, metric="a # device=device, # blur_size=config["blur_size"], # 
blur_sigma=config.get("blur_sigma", config["blur_size"]), - # floor=True, + # floor=False, # config=config, # metadata=metadata, - # ) for batch in tqdm(batches, desc="Computing floor batches", total=len(batches)) + # ) for batch in tqdm(batches, desc="Computing ceiling batches", total=len(batches)) # ) - # floor_results, all_floors = zip(*floor_returns) + # ceiling_results, all_ceilings = zip(*ceiling_returns) + # Force garbage collection between major operations + gc.collect() + + floor_returns = Parallel(n_jobs=n_jobs, prefer="threads")( + delayed(compute_correlation_batch)( + batch_indices=batch, + all_clickmaps=all_clickmaps, + all_names=final_keep_index, + metric=metric, + n_iterations=null_iterations, + device=device, + blur_size=config["blur_size"], + blur_sigma=config.get("blur_sigma", config["blur_size"]), + floor=True, + config=config, + metadata=metadata, + ) for batch in tqdm(batches, desc="Computing floor batches", total=len(batches)) + ) + floor_results, all_floors = zip(*floor_returns) all_img_ceilings = {} - #all_img_floors = {} - for img_ceilings in all_ceilings: - for img_name, score in img_ceilings.items(): - all_img_ceilings[img_name] = score - # for img_ceilings in all_floors: + all_img_floors = {} + # for img_ceilings in all_ceilings: # for img_name, score in img_ceilings.items(): - # all_img_floors[img_name] = score + # all_img_ceilings[img_name] = score + for img_ceilings in all_floors: + for img_name, score in img_ceilings.items(): + all_img_floors[img_name] = score # Flatten the results - all_ceilings = np.concatenate(ceiling_results) - # all_floors = np.concatenate(floor_results) + # all_ceilings = np.concatenate(ceiling_results) + all_floors = np.concatenate(floor_results) # Compute the mean of the ceilings and floors - mean_ceiling = all_ceilings.mean() - # mean_floor = all_floors.mean() + # mean_ceiling = all_ceilings.mean() + mean_floor = all_floors.mean() # Compute the ratio of the mean of the ceilings to the mean of the floors # ratio = mean_ceiling / mean_floor @@ -609,21 +607,36 @@ def compute_correlation_batch(batch_indices, all_clickmaps, all_names, metric="a # Save the results np.savez( - os.path.join(output_dir, f"{config['experiment_name']}_{config['metric']}_ceiling_results.npz"), - mean_ceiling=mean_ceiling, - all_ceilings=all_ceilings, - all_img_ceilings=all_img_ceilings) + os.path.join(output_dir, f"{config['experiment_name']}_{config['metric']}_floor_results.npz"), + mean_floor=mean_floor, + all_floors=all_floors, + all_img_floors=all_img_floors) if config['save_json']: # Save as json - with open(os.path.join(output_dir, f"{config['experiment_name']}_{config['metric']}_ceiling_results.json"), 'w') as f: - output_json = {"all_imgs": final_keep_index, 'mean_ceiling':mean_ceiling, - 'all_ceilings':all_ceilings, 'all_img_ceilings':all_img_ceilings} + with open(os.path.join(output_dir, f"{config['experiment_name']}_{config['metric']}_floor_results.json"), 'w') as f: + output_json = {"all_imgs": final_keep_index, 'mean_floor':mean_floor, + 'all_floors':all_floors, 'all_img_floors':all_img_floors} for key, value in output_json.items(): if isinstance(value, np.ndarray): output_json[key] = value.tolist() output_content = json.dumps(output_json, indent=4) f.write(output_content) # np.savez( + # os.path.join(output_dir, f"{config['experiment_name']}_{config['metric']}_ceiling_results.npz"), + # mean_ceiling=mean_ceiling, + # all_ceilings=all_ceilings, + # all_img_ceilings=all_img_ceilings) + # if config['save_json']: + # # Save as json + # with 
open(os.path.join(output_dir, f"{config['experiment_name']}_{config['metric']}_ceiling_results.json"), 'w') as f: + # output_json = {"all_imgs": final_keep_index, 'mean_ceiling':mean_ceiling, + # 'all_ceilings':all_ceilings, 'all_img_ceilings':all_img_ceilings} + # for key, value in output_json.items(): + # if isinstance(value, np.ndarray): + # output_json[key] = value.tolist() + # output_content = json.dumps(output_json, indent=4) + # f.write(output_content) + # np.savez( # os.path.join(output_dir, f"{config['experiment_name']}_{config['metric']}_ceiling_floor_results.npz"), # mean_ceiling=mean_ceiling, # mean_floor=mean_floor, diff --git a/configs/imgnet_configs/imagenet_val_spearman_oscar.yaml b/configs/imgnet_configs/imagenet_val_spearman_oscar.yaml index 3fc4dc2..0da9449 100644 --- a/configs/imgnet_configs/imagenet_val_spearman_oscar.yaml +++ b/configs/imgnet_configs/imagenet_val_spearman_oscar.yaml @@ -41,4 +41,5 @@ multi_thresh_gpu: multi_thresh_gpu output_format: "hdf5" processed_clickmap_bins: jay_imagenet_val_combined_08_27_2025_clickmap_bins.npy chunk_size: 100000 -batch_size: 14000 \ No newline at end of file +batch_size: 14000 +save_json: true \ No newline at end of file diff --git a/scripts/compute_floor.sh b/scripts/compute_floor.sh new file mode 100644 index 0000000..fdd119c --- /dev/null +++ b/scripts/compute_floor.sh @@ -0,0 +1,10 @@ +#!/bin/bash +#SBATCH -J ImgNet_Floor +#SBATCH -N 1-1 +#SBATCH -n 16 +#SBATCH -t 96:00:00 +#SBATCH --gres=gpu:1 +#SBATCH --mem=512G +#SBATCH -p gpu-he +echo Starting execution at `date` +conda run -p ../../gs-perception/venv python ceiling_floor_estimate_large.py configs/imgnet_configs/imagenet_val_spearman_oscar.yaml \ No newline at end of file diff --git a/scripts/run_exp.sh b/scripts/run_exp.sh index 379c33e..9178270 100644 --- a/scripts/run_exp.sh +++ b/scripts/run_exp.sh @@ -2,7 +2,7 @@ # python ceiling_floor_estimate.py configs/exp_configs/imagenet_val_oscar_max_10.yaml #python ceiling_floor_estimate.py configs/exp_configs/imagenet_val_oscar_max_15.yaml # python ceiling_floor_estimate.py configs/exp_configs/imagenet_val_oscar_max_20.yaml -python ceiling_floor_estimate.py configs/exp_configs/imagenet_val_oscar_max_25.yaml +# python ceiling_floor_estimate.py configs/exp_configs/imagenet_val_oscar_max_25.yaml # python ceiling_floor_estimate.py configs/exp_configs/imagenet_val_oscar_max_30.yaml python ceiling_floor_estimate.py configs/balance_exp_configs/imagenet_val_oscar_max_5.yaml diff --git a/src/utils.py b/src/utils.py index b34df17..9b8b5bc 100644 --- a/src/utils.py +++ b/src/utils.py @@ -163,6 +163,7 @@ def filter_for_foreground_masks( def process_clickme_data(data_file, filter_mobile, catch_thresh=0.95): if "csv" in data_file: + df = pd.read_csv(data_file) return pd.read_csv(data_file) elif "npz" in data_file: print("Load npz") @@ -339,8 +340,6 @@ def process_single_row(row): return None image_file_name = folder_image_file_name image_file_names.append(folder_image_file_name) - # elif file_inclusion_filter and file_inclusion_filter not in image_file_name: - # return None elif file_inclusion_filter and file_inclusion_filter not in image_file_name: return None @@ -352,9 +351,10 @@ def process_single_row(row): clickmap = row["clicks"] if isinstance(clickmap, str): - clean_string = re.sub(r'[{}"]', '', clickmap) - tuple_strings = clean_string.split(', ') - data_list = tuple_strings[0].strip("()").split("),(") + clean_string = re.sub(r'[{}"\[\]]', '', clickmap) + # tuple_strings = clean_string.split(', ') + # data_list = 
tuple_strings.strip("()").split("),(") + data_list = clean_string.strip("()").split("), (") if len(data_list) == 1: # Remove empty clickmaps return None tuples_list = [tuple(map(int, pair.split(','))) for pair in data_list] @@ -1813,7 +1813,9 @@ def process_all_maps_multi_thresh_gpu( bin_clickmaps = np.stack(bin_clickmaps, axis=0) if bin_clickmaps.shape[1] < min_subjects: continue - bin_clickmaps = bin_clickmaps[:, :max_subjects, :, :] + if max_subjects > 0: + max_subjects = min(max_subjects, bin_clickmaps.shape[1]) + bin_clickmaps = bin_clickmaps[:, :max_subjects, :, :] all_clickmaps.append(np.stack(bin_clickmaps, axis=0)) else: all_clickmaps.append(np.concatenate(bin_clickmaps, axis=0)) @@ -2250,4 +2252,3 @@ def get_rot_target(img_idx): target_img_diffs.append(i) return target_img_ids, target_img_diffs - From d2af51f5f62e967e7fb944a9aaf65d245e95ed8a Mon Sep 17 00:00:00 2001 From: PPPayson Date: Fri, 19 Sep 2025 18:06:31 -0400 Subject: [PATCH 03/16] Remove commented code --- ceiling_floor_estimate_large.py | 98 ++++++++++++--------------------- 1 file changed, 34 insertions(+), 64 deletions(-) diff --git a/ceiling_floor_estimate_large.py b/ceiling_floor_estimate_large.py index 2d66f90..890e077 100644 --- a/ceiling_floor_estimate_large.py +++ b/ceiling_floor_estimate_large.py @@ -549,23 +549,23 @@ def compute_correlation_batch(batch_indices, all_clickmaps, all_names, metric="a # print(f"Reducing parallel jobs from {n_jobs} to {adjusted_n_jobs} to prevent 'too many files open' error") # n_jobs = adjusted_n_jobs - # Process correlation batches in parallel - # ceiling_returns = Parallel(n_jobs=n_jobs, prefer="threads")( - # delayed(compute_correlation_batch)( - # batch_indices=batch, - # all_clickmaps=all_clickmaps, - # all_names=final_keep_index, - # metric=metric, - # n_iterations=null_iterations, - # device=device, - # blur_size=config["blur_size"], - # blur_sigma=config.get("blur_sigma", config["blur_size"]), - # floor=False, - # config=config, - # metadata=metadata, - # ) for batch in tqdm(batches, desc="Computing ceiling batches", total=len(batches)) - # ) - # ceiling_results, all_ceilings = zip(*ceiling_returns) + # Process correlation batches in parallel + ceiling_returns = Parallel(n_jobs=n_jobs, prefer="threads")( + delayed(compute_correlation_batch)( + batch_indices=batch, + all_clickmaps=all_clickmaps, + all_names=final_keep_index, + metric=metric, + n_iterations=null_iterations, + device=device, + blur_size=config["blur_size"], + blur_sigma=config.get("blur_sigma", config["blur_size"]), + floor=False, + config=config, + metadata=metadata, + ) for batch in tqdm(batches, desc="Computing ceiling batches", total=len(batches)) + ) + ceiling_results, all_ceilings = zip(*ceiling_returns) # Force garbage collection between major operations gc.collect() @@ -587,75 +587,45 @@ def compute_correlation_batch(batch_indices, all_clickmaps, all_names, metric="a floor_results, all_floors = zip(*floor_returns) all_img_ceilings = {} all_img_floors = {} - # for img_ceilings in all_ceilings: - # for img_name, score in img_ceilings.items(): - # all_img_ceilings[img_name] = score + for img_ceilings in all_ceilings: + for img_name, score in img_ceilings.items(): + all_img_ceilings[img_name] = score for img_ceilings in all_floors: for img_name, score in img_ceilings.items(): all_img_floors[img_name] = score # Flatten the results - # all_ceilings = np.concatenate(ceiling_results) + all_ceilings = np.concatenate(ceiling_results) all_floors = np.concatenate(floor_results) # Compute the mean of the ceilings
and floors - # mean_ceiling = all_ceilings.mean() + mean_ceiling = all_ceilings.mean() mean_floor = all_floors.mean() # Compute the ratio of the mean of the ceilings to the mean of the floors - # ratio = mean_ceiling / mean_floor - # print(f"Mean ceiling: {mean_ceiling}, Mean floor: {mean_floor}, Ratio: {ratio}") + ratio = mean_ceiling / mean_floor + print(f"Mean ceiling: {mean_ceiling}, Mean floor: {mean_floor}, Ratio: {ratio}") # Save the results np.savez( - os.path.join(output_dir, f"{config['experiment_name']}_{config['metric']}_floor_results.npz"), + os.path.join(output_dir, f"{config['experiment_name']}_{config['metric']}_ceiling_floor_results.npz"), + mean_ceiling=mean_ceiling, mean_floor=mean_floor, + all_ceilings=all_ceilings, all_floors=all_floors, - all_img_floors=all_img_floors) + all_img_ceilings=all_img_ceilings, + all_img_floors=all_img_floors, + ratio=ratio) if config['save_json']: # Save as json - with open(os.path.join(output_dir, f"{config['experiment_name']}_{config['metric']}_floor_results.json"), 'w') as f: - output_json = {"all_imgs": final_keep_index, 'mean_floor':mean_floor, - 'all_floors':all_floors, 'all_img_floors':all_img_floors} + with open(os.path.join(output_dir, f"{config['experiment_name']}_{config['metric']}_ceiling_floor_results.json"), 'w') as f: + output_json = {"all_imgs": final_keep_index, 'mean_ceiling':mean_ceiling, 'mean_floor':mean_floor, + 'all_ceilings':all_ceilings, 'all_floors':all_floors, 'all_img_ceilings':all_img_ceilings, + 'all_img_floors':all_img_floors} for key, value in output_json.items(): if isinstance(value, np.ndarray): output_json[key] = value.tolist() output_content = json.dumps(output_json, indent=4) f.write(output_content) - # np.savez( - # os.path.join(output_dir, f"{config['experiment_name']}_{config['metric']}_ceiling_results.npz"), - # mean_ceiling=mean_ceiling, - # all_ceilings=all_ceilings, - # all_img_ceilings=all_img_ceilings) - # if config['save_json']: - # # Save as json - # with open(os.path.join(output_dir, f"{config['experiment_name']}_{config['metric']}_ceiling_results.json"), 'w') as f: - # output_json = {"all_imgs": final_keep_index, 'mean_ceiling':mean_ceiling, - # 'all_ceilings':all_ceilings, 'all_img_ceilings':all_img_ceilings} - # for key, value in output_json.items(): - # if isinstance(value, np.ndarray): - # output_json[key] = value.tolist() - # output_content = json.dumps(output_json, indent=4) - # f.write(output_content) - # np.savez( - # os.path.join(output_dir, f"{config['experiment_name']}_{config['metric']}_ceiling_floor_results.npz"), - # mean_ceiling=mean_ceiling, - # mean_floor=mean_floor, - # all_ceilings=all_ceilings, - # all_floors=all_floors, - # all_img_ceilings=all_img_ceilings, - # all_img_floors=all_img_floors, - # ratio=ratio) - # if config['save_json']: - # # Save as json - # with open(os.path.join(output_dir, f"{config['experiment_name']}_{config['metric']}_ceiling_floor_results.json"), 'w') as f: - # output_json = {"all_imgs": final_keep_index, 'mean_ceiling':mean_ceiling, 'mean_floor':mean_floor, - # 'all_ceilings':all_ceilings, 'all_floors':all_floors, 'all_img_ceilings':all_img_ceilings, - # 'all_img_floors':all_img_floors} - # for key, value in output_json.items(): - # if isinstance(value, np.ndarray): - # output_json[key] = value.tolist() - # output_content = json.dumps(output_json, indent=4) - # f.write(output_content) # Delete temp file to save disk if os.path.exists(temp_dir): From 145a8280c5dbc3b43cfa8758845a31e59087a9f9 Mon Sep 17 00:00:00 2001 From: PPPayson Date: Wed, 1 
Oct 2025 13:20:58 -0400 Subject: [PATCH 04/16] Fix bootstrap --- .gitignore | 3 +- ceiling_floor_estimate.py | 12 +-- scripts/run_exp.sh | 20 ++-- src/__init__.py | 0 tools/__init__.py | 0 tools/find_top_bottom.py | 95 +++++++++++++++++++ .../sample_clickmaps.py | 0 7 files changed, 113 insertions(+), 17 deletions(-) create mode 100644 src/__init__.py create mode 100644 tools/__init__.py create mode 100644 tools/find_top_bottom.py rename sample_clickmaps.py => tools/sample_clickmaps.py (100%) diff --git a/.gitignore b/.gitignore index 2637615..20d99ee 100644 --- a/.gitignore +++ b/.gitignore @@ -17,4 +17,5 @@ jay_imagenet_train_04_30_2025_dimensions.npy jay_imagenet_train_0.1_dimensions.npy *.png clickme_datasets/ -*.csv \ No newline at end of file +*.csv +temp/ \ No newline at end of file diff --git a/ceiling_floor_estimate.py b/ceiling_floor_estimate.py index 4444094..a28fad7 100644 --- a/ceiling_floor_estimate.py +++ b/ceiling_floor_estimate.py @@ -184,15 +184,14 @@ def compute_rotation_correlation_batch(batch_indices, all_data, all_names, metri for iteration in range(n_iterations): test_rand_perm = np.random.permutation(n) fh = test_rand_perm[:(n//2)] - fh = random.choices(fh, k=n) + fh = list(fh) + random.choices(fh, k=n//2) test_map = clickmap_at_k[fh].mean(0) if not floor and target_img_name == img_name: target_rand_perm = test_rand_perm else: target_rand_perm = np.random.permutation(target_n) sh = target_rand_perm[(target_n//2):] - sh = random.choices(sh, k=target_n) - + sh = list(sh) + random.choices(sh, k=target_n//2) reference_map = target_clickmap_at_k[sh].mean(0) # Save for visualization if k == (len(clickmaps)-1) and iteration == (n_iterations-1): @@ -320,6 +319,7 @@ def compute_scale_correlation_batch(batch_indices, all_data, all_names, metric=" test_rand_perm = np.random.permutation(n) fh = test_rand_perm[:(n // 2)] fh = random.choices(fh, k=n) + fh = list(fh) + random.choices(fh, k=n//2) # sh = test_rand_perm[(n//2):] test_map = clickmap_at_k[fh].mean(0) if not floor and target_img_name == img_name: @@ -327,7 +327,7 @@ def compute_scale_correlation_batch(batch_indices, all_data, all_names, metric=" else: target_rand_perm = np.random.permutation(target_n) sh = target_rand_perm[(target_n//2):] - sh = random.choices(sh, k=target_n) + sh = list(sh) + random.choices(sh, k=target_n//2) reference_map = target_clickmap_at_k[sh].mean(0) if k == (len(clickmaps)-1) and iteration == (n_iterations - 1): @@ -469,13 +469,13 @@ def compute_correlation_batch(batch_indices, all_clickmaps, all_names, metric="a rand_perm = np.random.permutation(n) fh = rand_perm[:(n // 2)] # Add bootstrapping to max fh/sh size to original img - fh = random.choices(fh, k=n) + fh = list(fh) + random.choices(fh, k=n//2) # Create the test and reference maps test_map = clickmap_at_k[fh].mean(0) if floor: rand_perm = np.random.permutation(rand_n) sh = rand_perm[(rand_n // 2):] - sh = random.choices(sh, k=rand_n) + sh = list(sh) + random.choices(sh, k=rand_n//2) reference_map = rand_clickmap_at_k[sh].mean(0) # Take maps from the same level in a random other image reference_map = utils.blur_maps_for_cf( reference_map[None, None], diff --git a/scripts/run_exp.sh b/scripts/run_exp.sh index 9178270..ca61a99 100644 --- a/scripts/run_exp.sh +++ b/scripts/run_exp.sh @@ -1,11 +1,11 @@ -# python ceiling_floor_estimate.py configs/exp_configs/imagenet_val_oscar_max_5.yaml -# python ceiling_floor_estimate.py configs/exp_configs/imagenet_val_oscar_max_10.yaml -#python ceiling_floor_estimate.py 
configs/exp_configs/imagenet_val_oscar_max_15.yaml -# python ceiling_floor_estimate.py configs/exp_configs/imagenet_val_oscar_max_20.yaml -# python ceiling_floor_estimate.py configs/exp_configs/imagenet_val_oscar_max_25.yaml -# python ceiling_floor_estimate.py configs/exp_configs/imagenet_val_oscar_max_30.yaml +python ceiling_floor_estimate.py configs/exp_configs/imagenet_val_oscar_max_5.yaml +python ceiling_floor_estimate.py configs/exp_configs/imagenet_val_oscar_max_10.yaml +python ceiling_floor_estimate.py configs/exp_configs/imagenet_val_oscar_max_15.yaml +python ceiling_floor_estimate.py configs/exp_configs/imagenet_val_oscar_max_20.yaml +python ceiling_floor_estimate.py configs/exp_configs/imagenet_val_oscar_max_25.yaml +python ceiling_floor_estimate.py configs/exp_configs/imagenet_val_oscar_max_30.yaml -python ceiling_floor_estimate.py configs/balance_exp_configs/imagenet_val_oscar_max_5.yaml -python ceiling_floor_estimate.py configs/balance_exp_configs/imagenet_val_oscar_max_10.yaml -python ceiling_floor_estimate.py configs/balance_exp_configs/imagenet_val_oscar_max_15.yaml -python ceiling_floor_estimate.py configs/balance_exp_configs/imagenet_val_oscar_max_20.yaml \ No newline at end of file +# python ceiling_floor_estimate.py configs/balance_exp_configs/imagenet_val_oscar_max_5.yaml +# python ceiling_floor_estimate.py configs/balance_exp_configs/imagenet_val_oscar_max_10.yaml +# python ceiling_floor_estimate.py configs/balance_exp_configs/imagenet_val_oscar_max_15.yaml +# python ceiling_floor_estimate.py configs/balance_exp_configs/imagenet_val_oscar_max_20.yaml \ No newline at end of file diff --git a/src/__init__.py b/src/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tools/__init__.py b/tools/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tools/find_top_bottom.py b/tools/find_top_bottom.py new file mode 100644 index 0000000..38a25e6 --- /dev/null +++ b/tools/find_top_bottom.py @@ -0,0 +1,95 @@ +import os +import numpy as np +from src.utils import process_clickme_data +from tqdm import tqdm +import h5py +import json +from matplotlib import pyplot as plt +from PIL import Image +from src.utils import process_clickme_data + +def get_num_subjects(): + clickme_data_file = "/cifs/data/tserre_lrs/projects/projects/prj_video_imagenet/human_clickme_data_processing/clickme_datasets/val_combined_08_27_2025.npz" + clickme_data = process_clickme_data(clickme_data_file, True) + total_numbers = {} + for _, row in tqdm(clickme_data.iterrows(), total=len(clickme_data), desc="Processing clickmaps"): + image_path = row['image_path'] + image_file_name = row['image_path'].split(os.path.sep)[-1] + if "ILSVRC2012_val" not in image_path: + continue + if image_file_name not in total_numbers: + total_numbers[image_file_name] = 1 + else: + total_numbers[image_file_name] += 1 + return total_numbers + +def plot_clickmap(img, hmp, score, num_subjects, img_name, image_output_dir): + f = plt.figure() + plt.subplot(1, 2, 1) + plt.imshow(np.asarray(img)) + print(img_name, np.asarray(img).shape) + title = f"{img_name}\nSpearman: {score}\nNum Subjects: {num_subjects}" + plt.title(title) + plt.axis("off") + plt.subplot(1, 2, 2) + plt.imshow(hmp) + plt.axis("off") + plt.savefig(os.path.join(image_output_dir, img_name.replace('/', '_'))) + plt.close() + return + +if __name__ == "__main__": + scores_json = "assets/exp_30_subjects_08_27_2025_spearman_ceiling_floor_results.json" + data_root = "/gpfs/data/shared/imagenet/ILSVRC2012/val" + image_output_dir = 
"temp/top_bot_imgs_30" + os.makedirs(image_output_dir, exist_ok=True) + with open(scores_json, 'r') as f: + scores_dict = json.load(f)['all_img_ceilings'] + + val_map_files = ['assets/jay_imagenet_val_08_27_2025_batch001.h5', 'assets/jay_imagenet_val_08_27_2025_batch002.h5', + 'assets/jay_imagenet_val_08_27_2025_batch003.h5', 'assets/jay_imagenet_val_08_27_2025_batch004.h5'] + num_subjects_dict = get_num_subjects() + top10 = dict(sorted(scores_dict.items(), key=lambda x: x[1], reverse=True)[:10]) + bot10 = dict(sorted(scores_dict.items(), key=lambda x: x[1], reverse=False)[:10]) + top10_maps = [] + bot10_maps = [] + for map_file in val_map_files: + map_content = h5py.File(map_file, 'r')['clickmaps'] + for img_name in top10: + img_name = img_name.replace('/', '_') + if img_name in map_content: + top10_maps.append(map_content[img_name]['clickmap'][:].mean(0)) + for bot_img_name in bot10: + bot_img_name = bot_img_name.replace('/', '_') + + if bot_img_name in map_content: + bot10_maps.append(map_content[bot_img_name]['clickmap'][:].mean(0)) + if len(top10_maps) == 10 and len(bot10_maps) == 10: + break + top10_paths = [] + bot10_paths = [] + for img_name in top10: + img_name = img_name.split('/')[1] + img_path = os.path.join(data_root, f'{img_name}') + top10_paths.append(img_path) + for img_name in bot10: + img_name = img_name.split('/')[1] + img_path = os.path.join(data_root, f'{img_name}') + bot10_paths.append(img_path) + + for i, img_name in enumerate(top10): + score = scores_dict[img_name] + img_name = img_name.split('/')[1] + hmp = top10_maps[i] + img = Image.open(top10_paths[i]) + num_subjects = num_subjects_dict[img_name] + plot_clickmap(img, hmp, score, num_subjects, f"top_{img_name}", image_output_dir) + + + for i, img_name in enumerate(bot10): + score = scores_dict[img_name] + img_name = img_name.split('/')[1] + hmp = bot10_maps[i] + img = Image.open(bot10_paths[i]) + num_subjects = num_subjects_dict[img_name] + plot_clickmap(img, hmp, score, num_subjects, f"bottom_{img_name}", image_output_dir) \ No newline at end of file diff --git a/sample_clickmaps.py b/tools/sample_clickmaps.py similarity index 100% rename from sample_clickmaps.py rename to tools/sample_clickmaps.py From 5499d8e1ed65736900cd85d8d75323922179aabb Mon Sep 17 00:00:00 2001 From: PPPayson Date: Sun, 5 Oct 2025 15:22:48 -0400 Subject: [PATCH 05/16] fix image name matching --- ceiling_floor_estimate.py | 6 +- configs/debug/bottom_10.yaml | 46 + missing_val.json | 3564 ++++++++++++++++++++++++++++++++++ src/utils.py | 4 +- tools/find_missing_images.py | 15 + tools/sample_clickmaps.py | 9 +- 6 files changed, 3638 insertions(+), 6 deletions(-) create mode 100644 configs/debug/bottom_10.yaml create mode 100644 missing_val.json create mode 100644 tools/find_missing_images.py diff --git a/ceiling_floor_estimate.py b/ceiling_floor_estimate.py index a28fad7..c52f10e 100644 --- a/ceiling_floor_estimate.py +++ b/ceiling_floor_estimate.py @@ -423,6 +423,8 @@ def compute_correlation_batch(batch_indices, all_clickmaps, all_names, metric="a for i in tqdm(batch_indices, desc="Computing split-half correlations", total=len(batch_indices)): clickmaps = all_clickmaps[i] img_name = all_names[i] + print(img_name) + level_corrs = [] #TODO modify for speed up if metadata and img_name in metadata: @@ -465,7 +467,7 @@ def compute_correlation_batch(batch_indices, all_clickmaps, all_names, metric="a if floor: rand_clickmap_at_k = rand_clickmaps[k] rand_n = len(rand_clickmap_at_k) - for _ in range(n_iterations): + for n_iter in 
range(n_iterations): rand_perm = np.random.permutation(n) fh = rand_perm[:(n // 2)] # Add bootstrapping to max fh/sh size to original img @@ -527,7 +529,6 @@ def compute_correlation_batch(batch_indices, all_clickmaps, all_names, metric="a else: raise ValueError(f"Invalid metric: {metric}") rand_corrs.append(score) - # Explicitly free memory if 'blur_clickmaps' in locals(): del blur_clickmaps @@ -536,6 +537,7 @@ def compute_correlation_batch(batch_indices, all_clickmaps, all_names, metric="a level_corrs.append(rand_corrs) # Free memory gc.collect() + print(level_corrs) batch_results.append(np.asarray(level_corrs).mean()) # Integrate over the levels all_scores[img_name] = batch_results[-1] return batch_results, all_scores diff --git a/configs/debug/bottom_10.yaml b/configs/debug/bottom_10.yaml new file mode 100644 index 0000000..b122784 --- /dev/null +++ b/configs/debug/bottom_10.yaml @@ -0,0 +1,46 @@ +assets: assets +temp_dir: temp +blur_size: 21 +center_crop: +- 224 +- 224 +class_filter_file: false +clickme_data: clickme_datasets/bottom_imgnet_val.csv +correlation_batch_size: 1024 +debug: false +display_image_keys: auto +example_image_output_dir: bottom_imgnet +experiment_name: bottom_imgnet +file_exclusion_filter: null +file_inclusion_filter: ILSVRC2012_val +filter_mobile: true +gpu_batch_size: 4096 +image_path: /gpfs/data/shared/imagenet/ILSVRC2012/val +image_shape: +- 256 +- 256 +mask_dir: null +mask_threshold: 0 +max_clicks: 1000000 +metadata_file: image_metadata/jay_imagenet_val_04_30_2025_dimensions.npy +metric: spearman +min_clicks: 1 +min_subjects: 20 +max_subjects: 5 +n_jobs: -1 +null_iterations: 50 +parallel_prepare_maps: true +parallel_save: false +participant_filter: false +percentile_thresh: 50 +preprocess_db_data: true +processed_clickme_file: bottom_imgnet_processed.npz +processed_medians: bottom_imgnet_medians.json +remove_string: imagenet/val/ +time_based_bins: true +multi_thresh_gpu: multi_thresh_gpu +output_format: "hdf5" +processed_clickmap_bins: bottom_imgnet_clickmap_bins.npy +chunk_size: 100000 +batch_size: 14000 +save_json: true diff --git a/missing_val.json b/missing_val.json new file mode 100644 index 0000000..02bc91b --- /dev/null +++ b/missing_val.json @@ -0,0 +1,3564 @@ +{ + "val": [ + [ + "ILSVRC2012_val_00029670.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00048698.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00035220.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00003282.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00000121.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00040180.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00015472.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00010001.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00020672.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00022284.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00042767.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00041323.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00037298.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00028127.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00035837.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00018052.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00019683.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00023991.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00039757.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00030203.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00048026.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00016603.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00012459.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00010873.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00038728.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00046040.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00006496.JPEG", + 9 + ], + [ + 
"ILSVRC2012_val_00018908.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00017928.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00035563.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00034820.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00025741.JPEG", + 7 + ], + [ + "ILSVRC2012_val_00003418.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00001391.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00032882.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00041317.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00047361.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00029711.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00030597.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00010983.JPEG", + 7 + ], + [ + "ILSVRC2012_val_00000770.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00046347.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00030919.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00018208.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00043736.JPEG", + 6 + ], + [ + "ILSVRC2012_val_00028586.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00022938.JPEG", + 7 + ], + [ + "ILSVRC2012_val_00021756.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00001634.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00015950.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00023351.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00000322.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00024452.JPEG", + 6 + ], + [ + "ILSVRC2012_val_00037519.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00049969.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00014274.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00022273.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00044490.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00049418.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00005117.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00044424.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00001285.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00025756.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00001289.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00006220.JPEG", + 7 + ], + [ + "ILSVRC2012_val_00046109.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00003447.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00045069.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00026145.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00021969.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00009068.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00024130.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00022126.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00031290.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00039876.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00032873.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00016356.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00019857.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00044587.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00027917.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00045526.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00026188.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00000243.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00033097.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00026732.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00036136.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00004509.JPEG", + 7 + ], + [ + "ILSVRC2012_val_00018501.JPEG", + 7 + ], + [ + "ILSVRC2012_val_00037449.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00047439.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00002734.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00030410.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00012287.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00007106.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00020387.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00025303.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00009838.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00026205.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00012028.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00008759.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00035926.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00024164.JPEG", + 9 + ], + [ + 
"ILSVRC2012_val_00024581.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00035879.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00024732.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00009021.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00033330.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00033383.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00036476.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00028952.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00013685.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00042365.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00025590.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00005986.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00030709.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00026931.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00035004.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00017474.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00044807.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00029501.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00033525.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00001797.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00008302.JPEG", + 7 + ], + [ + "ILSVRC2012_val_00026335.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00026942.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00039780.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00033105.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00048676.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00027073.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00029258.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00042108.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00047771.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00045805.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00020145.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00023123.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00029263.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00047055.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00003577.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00022699.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00047094.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00004234.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00011894.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00007389.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00012271.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00020024.JPEG", + 7 + ], + [ + "ILSVRC2012_val_00021159.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00026309.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00049756.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00015715.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00039861.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00015945.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00015495.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00040371.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00045466.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00004540.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00012668.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00008996.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00021507.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00045803.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00034279.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00012671.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00039262.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00014868.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00008628.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00030329.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00003977.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00001622.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00029865.JPEG", + 7 + ], + [ + "ILSVRC2012_val_00005251.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00015726.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00026009.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00016003.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00029035.JPEG", + 7 + ], + [ + "ILSVRC2012_val_00014095.JPEG", + 7 + ], + [ + "ILSVRC2012_val_00037400.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00001307.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00031011.JPEG", + 9 + ], + [ + 
"ILSVRC2012_val_00018401.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00011279.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00039606.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00009689.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00038006.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00014411.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00029953.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00024620.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00014524.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00044280.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00022364.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00045086.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00003626.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00003042.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00006619.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00014242.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00032215.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00030162.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00024214.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00039686.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00010387.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00049065.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00048011.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00043774.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00035109.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00048675.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00027193.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00003788.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00023812.JPEG", + 6 + ], + [ + "ILSVRC2012_val_00032576.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00000361.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00010571.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00036729.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00005928.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00025878.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00009953.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00003614.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00004343.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00000339.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00043733.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00013981.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00006376.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00042985.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00005440.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00018705.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00020125.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00048156.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00011486.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00014365.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00043535.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00046679.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00007441.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00040536.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00033806.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00049284.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00025935.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00019011.JPEG", + 6 + ], + [ + "ILSVRC2012_val_00000969.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00004214.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00045631.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00031210.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00046392.JPEG", + 5 + ], + [ + "ILSVRC2012_val_00021726.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00032059.JPEG", + 7 + ], + [ + "ILSVRC2012_val_00033332.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00010118.JPEG", + 5 + ], + [ + "ILSVRC2012_val_00016055.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00026768.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00017958.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00042218.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00012951.JPEG", + 7 + ], + [ + "ILSVRC2012_val_00005133.JPEG", + 7 + ], + [ + "ILSVRC2012_val_00023762.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00044403.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00003433.JPEG", + 8 + ], + [ + 
"ILSVRC2012_val_00017758.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00016792.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00038481.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00031265.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00028377.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00027191.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00025097.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00005159.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00042549.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00049745.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00024976.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00013616.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00040617.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00023275.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00024307.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00001974.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00009489.JPEG", + 7 + ], + [ + "ILSVRC2012_val_00036610.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00012336.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00016980.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00010480.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00044078.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00042212.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00013889.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00015263.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00047319.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00005480.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00024519.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00011693.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00015865.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00042948.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00021635.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00004305.JPEG", + 6 + ], + [ + "ILSVRC2012_val_00020382.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00046876.JPEG", + 7 + ], + [ + "ILSVRC2012_val_00016454.JPEG", + 7 + ], + [ + "ILSVRC2012_val_00046887.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00022471.JPEG", + 7 + ], + [ + "ILSVRC2012_val_00045947.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00034959.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00023254.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00015664.JPEG", + 7 + ], + [ + "ILSVRC2012_val_00020509.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00047521.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00032918.JPEG", + 7 + ], + [ + "ILSVRC2012_val_00016525.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00018520.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00023936.JPEG", + 6 + ], + [ + "ILSVRC2012_val_00040633.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00005292.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00023981.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00008872.JPEG", + 7 + ], + [ + "ILSVRC2012_val_00035006.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00030689.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00037821.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00044762.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00038189.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00040994.JPEG", + 7 + ], + [ + "ILSVRC2012_val_00003334.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00011635.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00005668.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00021595.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00049946.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00018497.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00048404.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00030144.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00011836.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00027173.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00016199.JPEG", + 6 + ], + [ + "ILSVRC2012_val_00039903.JPEG", + 7 + ], + [ + "ILSVRC2012_val_00017147.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00034376.JPEG", + 7 + ], + [ + "ILSVRC2012_val_00010199.JPEG", + 7 + ], + [ + "ILSVRC2012_val_00036872.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00017500.JPEG", + 8 + ], + [ + 
"ILSVRC2012_val_00013914.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00020247.JPEG", + 6 + ], + [ + "ILSVRC2012_val_00040845.JPEG", + 7 + ], + [ + "ILSVRC2012_val_00026920.JPEG", + 7 + ], + [ + "ILSVRC2012_val_00013518.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00002897.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00041965.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00011940.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00005196.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00046244.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00004090.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00029282.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00014615.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00019585.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00040378.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00006280.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00047639.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00010588.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00002201.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00012968.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00018850.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00011364.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00002675.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00004206.JPEG", + 7 + ], + [ + "ILSVRC2012_val_00006126.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00004887.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00030092.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00028969.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00047797.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00008720.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00011348.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00027116.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00020416.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00040280.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00011265.JPEG", + 7 + ], + [ + "ILSVRC2012_val_00005847.JPEG", + 6 + ], + [ + "ILSVRC2012_val_00029981.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00041926.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00008339.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00035622.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00019963.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00042514.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00002934.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00011422.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00011045.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00012389.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00018154.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00014091.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00003902.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00043304.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00033130.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00010280.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00008281.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00024268.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00028063.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00033483.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00021660.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00009890.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00032471.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00022618.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00000528.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00000177.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00045006.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00027559.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00004755.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00037826.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00033049.JPEG", + 7 + ], + [ + "ILSVRC2012_val_00009713.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00008042.JPEG", + 7 + ], + [ + "ILSVRC2012_val_00033843.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00035165.JPEG", + 7 + ], + [ + "ILSVRC2012_val_00024072.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00022934.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00035933.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00005900.JPEG", + 7 + ], + [ + 
"ILSVRC2012_val_00010461.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00003354.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00009654.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00025048.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00045634.JPEG", + 7 + ], + [ + "ILSVRC2012_val_00014762.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00041886.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00034856.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00000785.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00006757.JPEG", + 7 + ], + [ + "ILSVRC2012_val_00031865.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00042460.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00022085.JPEG", + 7 + ], + [ + "ILSVRC2012_val_00025974.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00023940.JPEG", + 7 + ], + [ + "ILSVRC2012_val_00029524.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00037958.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00013187.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00048962.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00015004.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00019533.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00020244.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00000087.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00029360.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00017173.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00028261.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00005929.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00022637.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00033993.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00043352.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00009628.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00011801.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00032699.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00030755.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00033210.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00044147.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00047264.JPEG", + 7 + ], + [ + "ILSVRC2012_val_00008565.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00006247.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00027578.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00048209.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00006016.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00031892.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00047348.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00013601.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00027716.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00038461.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00041357.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00012040.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00047064.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00010269.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00039758.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00045558.JPEG", + 6 + ], + [ + "ILSVRC2012_val_00025204.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00046926.JPEG", + 7 + ], + [ + "ILSVRC2012_val_00023367.JPEG", + 6 + ], + [ + "ILSVRC2012_val_00042146.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00010181.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00031074.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00006551.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00010135.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00035046.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00041475.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00026293.JPEG", + 7 + ], + [ + "ILSVRC2012_val_00028560.JPEG", + 6 + ], + [ + "ILSVRC2012_val_00034695.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00013403.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00039131.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00041292.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00000839.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00014214.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00031201.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00024997.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00036551.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00037969.JPEG", + 9 + ], + [ + 
"ILSVRC2012_val_00033494.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00035698.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00013234.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00023961.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00035187.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00045817.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00040299.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00011040.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00041841.JPEG", + 7 + ], + [ + "ILSVRC2012_val_00003662.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00037937.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00023165.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00025273.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00045300.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00030883.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00031378.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00012180.JPEG", + 7 + ], + [ + "ILSVRC2012_val_00010420.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00017226.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00014045.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00036220.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00048370.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00049063.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00043887.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00049326.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00017551.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00034069.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00019353.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00014115.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00012221.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00030877.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00003711.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00020474.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00023530.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00017139.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00036295.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00006969.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00032684.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00044955.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00022503.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00016435.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00000213.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00027266.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00023807.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00039750.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00036448.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00041313.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00037076.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00017687.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00044972.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00047132.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00004080.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00017506.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00010209.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00030295.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00037441.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00045331.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00011157.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00048320.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00019000.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00028023.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00001558.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00013070.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00028976.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00002722.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00043708.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00029431.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00011628.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00020293.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00019550.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00043330.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00026753.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00032136.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00001807.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00011665.JPEG", + 8 + ], + [ + 
"ILSVRC2012_val_00030227.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00035874.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00042537.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00036329.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00043499.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00023996.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00040186.JPEG", + 7 + ], + [ + "ILSVRC2012_val_00004152.JPEG", + 7 + ], + [ + "ILSVRC2012_val_00003831.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00012936.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00040012.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00046451.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00011178.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00018284.JPEG", + 7 + ], + [ + "ILSVRC2012_val_00034443.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00018228.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00030930.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00044601.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00006269.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00036335.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00006323.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00046354.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00046448.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00023578.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00038484.JPEG", + 7 + ], + [ + "ILSVRC2012_val_00049109.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00028392.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00016470.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00047076.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00007560.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00013104.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00042649.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00018329.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00027361.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00047640.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00001523.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00013797.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00027403.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00035994.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00003137.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00017074.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00015815.JPEG", + 7 + ], + [ + "ILSVRC2012_val_00012304.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00033414.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00020915.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00032473.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00035127.JPEG", + 7 + ], + [ + "ILSVRC2012_val_00037471.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00009267.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00023999.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00016773.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00016705.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00004113.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00024751.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00037379.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00035766.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00049170.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00023250.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00027755.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00009326.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00041620.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00004687.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00020637.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00011003.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00010449.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00032828.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00042451.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00018832.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00038218.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00033915.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00017726.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00000455.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00028497.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00004894.JPEG", + 7 + ], + [ + "ILSVRC2012_val_00033582.JPEG", + 9 + ], + [ + 
"ILSVRC2012_val_00046925.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00012064.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00020386.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00039182.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00037483.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00049168.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00017638.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00008541.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00037418.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00045998.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00035618.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00002064.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00001099.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00005127.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00001858.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00005854.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00017434.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00042434.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00019369.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00019338.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00043731.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00014984.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00042356.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00031183.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00040718.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00001319.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00034558.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00047603.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00042675.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00044096.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00026919.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00042641.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00041033.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00032742.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00036519.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00039209.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00020098.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00018105.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00024891.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00007382.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00049713.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00015750.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00037728.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00035672.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00044115.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00020503.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00032384.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00017096.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00023626.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00000798.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00028360.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00014188.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00020051.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00021451.JPEG", + 7 + ], + [ + "ILSVRC2012_val_00043653.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00026237.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00042594.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00017429.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00021378.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00016947.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00013580.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00039583.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00049079.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00002144.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00030102.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00023947.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00026749.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00029056.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00007107.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00015140.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00025015.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00025951.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00022502.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00004342.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00018575.JPEG", + 8 + ], + [ + 
"ILSVRC2012_val_00028135.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00009812.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00038029.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00034130.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00035969.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00001075.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00039194.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00036605.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00046103.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00018230.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00037499.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00024995.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00017088.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00037266.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00000308.JPEG", + 7 + ], + [ + "ILSVRC2012_val_00047113.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00002801.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00002611.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00014015.JPEG", + 7 + ], + [ + "ILSVRC2012_val_00019703.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00035822.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00042984.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00036001.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00039320.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00014351.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00026047.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00040078.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00033746.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00013332.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00034380.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00005530.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00040325.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00044654.JPEG", + 7 + ], + [ + "ILSVRC2012_val_00032077.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00028099.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00004513.JPEG", + 6 + ], + [ + "ILSVRC2012_val_00013667.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00042014.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00012753.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00016730.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00027621.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00019699.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00023363.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00021582.JPEG", + 7 + ], + [ + "ILSVRC2012_val_00012718.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00021786.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00001375.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00049094.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00006574.JPEG", + 7 + ], + [ + "ILSVRC2012_val_00009822.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00042301.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00043130.JPEG", + 7 + ], + [ + "ILSVRC2012_val_00018248.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00034349.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00033562.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00035140.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00042456.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00034176.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00035028.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00048163.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00006314.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00008928.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00012930.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00036543.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00034619.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00003435.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00018141.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00049296.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00020708.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00023411.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00048385.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00048756.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00014773.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00043366.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00029508.JPEG", + 8 + ], + [ + 
"ILSVRC2012_val_00040630.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00038825.JPEG", + 7 + ], + [ + "ILSVRC2012_val_00000768.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00033826.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00013924.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00001714.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00021072.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00014207.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00008980.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00002040.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00001588.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00036792.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00007037.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00006392.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00028839.JPEG", + 6 + ], + [ + "ILSVRC2012_val_00008071.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00026752.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00024463.JPEG", + 5 + ], + [ + "ILSVRC2012_val_00020844.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00026157.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00042101.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00005501.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00005194.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00028206.JPEG", + 7 + ], + [ + "ILSVRC2012_val_00044315.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00008199.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00044810.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00018006.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00020222.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00040953.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00026127.JPEG", + 7 + ], + [ + "ILSVRC2012_val_00014075.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00046965.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00048673.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00000876.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00022639.JPEG", + 7 + ], + [ + "ILSVRC2012_val_00039798.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00042353.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00005997.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00027822.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00043487.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00026203.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00024949.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00037487.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00005513.JPEG", + 6 + ], + [ + "ILSVRC2012_val_00012849.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00046725.JPEG", + 7 + ], + [ + "ILSVRC2012_val_00000710.JPEG", + 7 + ], + [ + "ILSVRC2012_val_00021552.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00038854.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00046189.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00003420.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00037170.JPEG", + 7 + ], + [ + "ILSVRC2012_val_00042761.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00031789.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00028281.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00045153.JPEG", + 7 + ], + [ + "ILSVRC2012_val_00000430.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00017920.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00024746.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00007599.JPEG", + 7 + ], + [ + "ILSVRC2012_val_00010460.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00034802.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00045144.JPEG", + 7 + ], + [ + "ILSVRC2012_val_00035336.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00036654.JPEG", + 7 + ], + [ + "ILSVRC2012_val_00040073.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00027562.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00028471.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00019543.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00014748.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00023057.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00048407.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00002385.JPEG", + 7 + ], + [ + "ILSVRC2012_val_00038099.JPEG", + 7 + ], + [ + 
"ILSVRC2012_val_00013984.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00006752.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00028709.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00045449.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00046685.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00008904.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00046294.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00023818.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00025643.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00010706.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00033522.JPEG", + 7 + ], + [ + "ILSVRC2012_val_00041532.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00023192.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00025984.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00042392.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00027982.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00026463.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00022886.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00001479.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00012252.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00047481.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00011338.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00015891.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00019320.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00013880.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00008064.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00023154.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00040053.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00046777.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00028452.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00044206.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00033542.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00007533.JPEG", + 8 + ], + [ + "ILSVRC2012_val_00048729.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00012310.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00025428.JPEG", + 9 + ], + [ + "ILSVRC2012_val_00047880.JPEG", + 7 + ], + [ + "ILSVRC2012_val_00029162.JPEG", + 9 + ] + ] +} \ No newline at end of file diff --git a/src/utils.py b/src/utils.py index 9b8b5bc..5e4f817 100644 --- a/src/utils.py +++ b/src/utils.py @@ -1801,6 +1801,8 @@ def process_all_maps_multi_thresh_gpu( # continue # Only add to tracking structures if we can successfully process this image + if len(bin_clickmaps[0]) < min_subjects: + continue categories.append(key.split("/")[0]) keep_index.append(key) final_clickmaps[key] = trials @@ -1811,8 +1813,6 @@ def process_all_maps_multi_thresh_gpu( temp_group.create_dataset(f"clickmap_{str(clickmap_idx).zfill(8)}", data=np.stack(bin_clickmaps, axis=0)) elif return_before_blur: bin_clickmaps = np.stack(bin_clickmaps, axis=0) - if bin_clickmaps.shape[1] < min_subjects: - continue if max_subjects > 0: max_subjects = min(max_subjects, bin_clickmaps.shape[1]) bin_clickmaps = bin_clickmaps[:, :max_subjects, :, :] diff --git a/tools/find_missing_images.py b/tools/find_missing_images.py new file mode 100644 index 0000000..98bd2b4 --- /dev/null +++ b/tools/find_missing_images.py @@ -0,0 +1,15 @@ +import os +import json +from tools.find_top_bottom import get_num_subjects + +if __name__ == "__main__": + num_subjects = get_num_subjects() + missing_images = {"val":[]} + for img_name, num_subjects in num_subjects.items(): + if num_subjects < 10: + missing_images["val"].append((img_name, num_subjects)) + print(img_name, num_subjects) + + with open("missing_val.json", 'w') as f: + json_content = json.dumps(missing_images, indent=4) + f.write(json_content) \ No newline at end of file diff --git a/tools/sample_clickmaps.py b/tools/sample_clickmaps.py index dab36ad..95b5115 100644 --- a/tools/sample_clickmaps.py +++ b/tools/sample_clickmaps.py @@ -8,6 +8,9 @@ clickme_data = process_clickme_data(clickme_data_file, True) 
total_maps = len(clickme_data) total_numbers = {} + target_img_names = ["ILSVRC2012_val_00008676.JPEG", "ILSVRC2012_val_00009263.JPEG", "ILSVRC2012_val_00009305.JPEG", "ILSVRC2012_val_00013865.JPEG" + "ILSVRC2012_val_00021166.JPEG", "ILSVRC2012_val_00023616.JPEG", "ILSVRC2012_val_00024753.JPEG", "ILSVRC2012_val_00027647.JPEG", + "ILSVRC2012_val_00034111.JPEG", "ILSVRC2012_val_00038455.JPEG"] for _, row in tqdm(clickme_data.iterrows(), total=len(clickme_data), desc="Processing clickmaps"): image_path = row['image_path'] image_file_name = os.path.sep.join(row['image_path'].split(os.path.sep)[-2:]) @@ -28,7 +31,9 @@ sampled_names = [] for img_path, number in image_paths.items(): numbers.append(number) - if number > 20: + img_name = img_path.split('/')[-1] + if img_name in target_img_names: + # if number > 20: sampled_names.append(img_path) sampled_img_paths[cls_name].append(img_path) numbers = np.array(numbers) @@ -51,7 +56,7 @@ sampled_clickme_data["image_path"].isin(allowed_files) ] print(len(sampled_clickme_data)) - sampled_clickme_data.to_csv(os.path.join('clickme_datasets', 'sampled_imgnet_val.csv')) + sampled_clickme_data.to_csv(os.path.join('clickme_datasets', 'bottom_imgnet_val.csv')) \ No newline at end of file From 20d3cc77d255bfb71af320df739fbad96728d40a Mon Sep 17 00:00:00 2001 From: PPPayson Date: Sun, 5 Oct 2025 15:24:28 -0400 Subject: [PATCH 06/16] remove print --- ceiling_floor_estimate.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/ceiling_floor_estimate.py b/ceiling_floor_estimate.py index c52f10e..7c8b7cd 100644 --- a/ceiling_floor_estimate.py +++ b/ceiling_floor_estimate.py @@ -423,7 +423,6 @@ def compute_correlation_batch(batch_indices, all_clickmaps, all_names, metric="a for i in tqdm(batch_indices, desc="Computing split-half correlations", total=len(batch_indices)): clickmaps = all_clickmaps[i] img_name = all_names[i] - print(img_name) level_corrs = [] #TODO modify for speed up @@ -537,7 +536,6 @@ def compute_correlation_batch(batch_indices, all_clickmaps, all_names, metric="a level_corrs.append(rand_corrs) # Free memory gc.collect() - print(level_corrs) batch_results.append(np.asarray(level_corrs).mean()) # Integrate over the levels all_scores[img_name] = batch_results[-1] return batch_results, all_scores From acf8a9d4d5f8a857172e4e7595ca8041c172edd7 Mon Sep 17 00:00:00 2001 From: PPPayson Date: Sun, 5 Oct 2025 15:25:10 -0400 Subject: [PATCH 07/16] Revert sampling code --- tools/sample_clickmaps.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/tools/sample_clickmaps.py b/tools/sample_clickmaps.py index 95b5115..e064bf9 100644 --- a/tools/sample_clickmaps.py +++ b/tools/sample_clickmaps.py @@ -8,9 +8,9 @@ clickme_data = process_clickme_data(clickme_data_file, True) total_maps = len(clickme_data) total_numbers = {} - target_img_names = ["ILSVRC2012_val_00008676.JPEG", "ILSVRC2012_val_00009263.JPEG", "ILSVRC2012_val_00009305.JPEG", "ILSVRC2012_val_00013865.JPEG" - "ILSVRC2012_val_00021166.JPEG", "ILSVRC2012_val_00023616.JPEG", "ILSVRC2012_val_00024753.JPEG", "ILSVRC2012_val_00027647.JPEG", - "ILSVRC2012_val_00034111.JPEG", "ILSVRC2012_val_00038455.JPEG"] + #target_img_names = ["ILSVRC2012_val_00008676.JPEG", "ILSVRC2012_val_00009263.JPEG", "ILSVRC2012_val_00009305.JPEG", "ILSVRC2012_val_00013865.JPEG" + # "ILSVRC2012_val_00021166.JPEG", "ILSVRC2012_val_00023616.JPEG", "ILSVRC2012_val_00024753.JPEG", "ILSVRC2012_val_00027647.JPEG", + # "ILSVRC2012_val_00034111.JPEG", "ILSVRC2012_val_00038455.JPEG"] for _, row in 
tqdm(clickme_data.iterrows(), total=len(clickme_data), desc="Processing clickmaps"): image_path = row['image_path'] image_file_name = os.path.sep.join(row['image_path'].split(os.path.sep)[-2:]) @@ -32,8 +32,8 @@ for img_path, number in image_paths.items(): numbers.append(number) img_name = img_path.split('/')[-1] - if img_name in target_img_names: - # if number > 20: + #if img_name in target_img_names: + if number > 20: sampled_names.append(img_path) sampled_img_paths[cls_name].append(img_path) numbers = np.array(numbers) From f3dfcb9f724b1a44d20fdf9371a18be1de9bb151 Mon Sep 17 00:00:00 2001 From: Jay Gopal Date: Sun, 5 Oct 2025 16:07:25 -0400 Subject: [PATCH 08/16] fix duplicate removal and added oscar-friendly run files --- .../imagenet_val_oscar_max_10.yaml | 1 + .../imagenet_val_oscar_max_15.yaml | 1 + .../imagenet_val_oscar_max_20.yaml | 1 + .../imagenet_val_oscar_max_5.yaml | 1 + .../imagenet_val_oscar_max_10.yaml | 1 + .../imagenet_val_oscar_max_15.yaml | 1 + .../imagenet_val_oscar_max_20.yaml | 1 + .../imagenet_val_oscar_max_25.yaml | 1 + .../imagenet_val_oscar_max_30.yaml | 1 + .../exp_configs/imagenet_val_oscar_max_5.yaml | 1 + logs/log-ClickMe-Processing-13138590.out | 10 ++++++++ scripts/run_exp_Jay.sh | 24 +++++++++++++++++++ 12 files changed, 44 insertions(+) create mode 100644 logs/log-ClickMe-Processing-13138590.out create mode 100644 scripts/run_exp_Jay.sh diff --git a/configs/balance_exp_configs/imagenet_val_oscar_max_10.yaml b/configs/balance_exp_configs/imagenet_val_oscar_max_10.yaml index 2b43c31..f64073b 100644 --- a/configs/balance_exp_configs/imagenet_val_oscar_max_10.yaml +++ b/configs/balance_exp_configs/imagenet_val_oscar_max_10.yaml @@ -44,3 +44,4 @@ processed_clickmap_bins: jay_imagenet_val_combined_08_27_2025_clickmap_bins.npy chunk_size: 100000 batch_size: 14000 save_json: true +filter_duplicates: true diff --git a/configs/balance_exp_configs/imagenet_val_oscar_max_15.yaml b/configs/balance_exp_configs/imagenet_val_oscar_max_15.yaml index 8b930af..af506b2 100644 --- a/configs/balance_exp_configs/imagenet_val_oscar_max_15.yaml +++ b/configs/balance_exp_configs/imagenet_val_oscar_max_15.yaml @@ -44,3 +44,4 @@ processed_clickmap_bins: jay_imagenet_val_combined_08_27_2025_clickmap_bins.npy chunk_size: 100000 batch_size: 14000 save_json: true +filter_duplicates: true diff --git a/configs/balance_exp_configs/imagenet_val_oscar_max_20.yaml b/configs/balance_exp_configs/imagenet_val_oscar_max_20.yaml index 40e5684..8de7dd0 100644 --- a/configs/balance_exp_configs/imagenet_val_oscar_max_20.yaml +++ b/configs/balance_exp_configs/imagenet_val_oscar_max_20.yaml @@ -44,3 +44,4 @@ processed_clickmap_bins: jay_imagenet_val_combined_08_27_2025_clickmap_bins.npy chunk_size: 100000 batch_size: 14000 save_json: true +filter_duplicates: true diff --git a/configs/balance_exp_configs/imagenet_val_oscar_max_5.yaml b/configs/balance_exp_configs/imagenet_val_oscar_max_5.yaml index a7e05e4..b1963b4 100644 --- a/configs/balance_exp_configs/imagenet_val_oscar_max_5.yaml +++ b/configs/balance_exp_configs/imagenet_val_oscar_max_5.yaml @@ -44,3 +44,4 @@ processed_clickmap_bins: jay_imagenet_val_combined_08_27_2025_clickmap_bins.npy chunk_size: 100000 batch_size: 14000 save_json: true +filter_duplicates: true diff --git a/configs/exp_configs/imagenet_val_oscar_max_10.yaml b/configs/exp_configs/imagenet_val_oscar_max_10.yaml index 8c68fa9..4d7818b 100644 --- a/configs/exp_configs/imagenet_val_oscar_max_10.yaml +++ b/configs/exp_configs/imagenet_val_oscar_max_10.yaml @@ -44,3 +44,4 @@ 
processed_clickmap_bins: jay_imagenet_val_combined_08_27_2025_clickmap_bins.npy chunk_size: 100000 batch_size: 14000 save_json: true +filter_duplicates: true diff --git a/configs/exp_configs/imagenet_val_oscar_max_15.yaml b/configs/exp_configs/imagenet_val_oscar_max_15.yaml index 36d9eb1..58ca953 100644 --- a/configs/exp_configs/imagenet_val_oscar_max_15.yaml +++ b/configs/exp_configs/imagenet_val_oscar_max_15.yaml @@ -44,3 +44,4 @@ processed_clickmap_bins: jay_imagenet_val_combined_08_27_2025_clickmap_bins.npy chunk_size: 100000 batch_size: 14000 save_json: true +filter_duplicates: true diff --git a/configs/exp_configs/imagenet_val_oscar_max_20.yaml b/configs/exp_configs/imagenet_val_oscar_max_20.yaml index c87fb70..5e6b947 100644 --- a/configs/exp_configs/imagenet_val_oscar_max_20.yaml +++ b/configs/exp_configs/imagenet_val_oscar_max_20.yaml @@ -44,3 +44,4 @@ processed_clickmap_bins: jay_imagenet_val_combined_08_27_2025_clickmap_bins.npy chunk_size: 100000 batch_size: 14000 save_json: true +filter_duplicates: true diff --git a/configs/exp_configs/imagenet_val_oscar_max_25.yaml b/configs/exp_configs/imagenet_val_oscar_max_25.yaml index fc70d79..2c7dc91 100644 --- a/configs/exp_configs/imagenet_val_oscar_max_25.yaml +++ b/configs/exp_configs/imagenet_val_oscar_max_25.yaml @@ -44,3 +44,4 @@ processed_clickmap_bins: jay_imagenet_val_combined_08_27_2025_clickmap_bins.npy chunk_size: 100000 batch_size: 14000 save_json: true +filter_duplicates: true diff --git a/configs/exp_configs/imagenet_val_oscar_max_30.yaml b/configs/exp_configs/imagenet_val_oscar_max_30.yaml index afea899..9b683e9 100644 --- a/configs/exp_configs/imagenet_val_oscar_max_30.yaml +++ b/configs/exp_configs/imagenet_val_oscar_max_30.yaml @@ -44,3 +44,4 @@ processed_clickmap_bins: jay_imagenet_val_combined_08_27_2025_clickmap_bins.npy chunk_size: 100000 batch_size: 14000 save_json: true +filter_duplicates: true diff --git a/configs/exp_configs/imagenet_val_oscar_max_5.yaml b/configs/exp_configs/imagenet_val_oscar_max_5.yaml index c172ccc..74ab811 100644 --- a/configs/exp_configs/imagenet_val_oscar_max_5.yaml +++ b/configs/exp_configs/imagenet_val_oscar_max_5.yaml @@ -44,3 +44,4 @@ processed_clickmap_bins: jay_imagenet_val_combined_08_27_2025_clickmap_bins.npy chunk_size: 100000 batch_size: 14000 save_json: true +filter_duplicates: true diff --git a/logs/log-ClickMe-Processing-13138590.out b/logs/log-ClickMe-Processing-13138590.out new file mode 100644 index 0000000..55c3c5d --- /dev/null +++ b/logs/log-ClickMe-Processing-13138590.out @@ -0,0 +1,10 @@ +## SLURM PROLOG ############################################################### +## Job ID : 13138590 +## Job Name : Jay-ClickMe-Processing +## Nodelist : gpu2001 +## CPUs : 8 +## Mem/Node : 131072 MB +## Directory : /oscar/data/tserre/jgopal/human_clickme_data_processing_Oct2025/human_clickme_data_processing +## Job Started : Sun Oct 5 15:35:39 EDT 2025 +############################################################################### +slurmstepd: error: *** JOB 13138590 ON gpu2001 CANCELLED AT 2025-10-05T15:44:00 *** diff --git a/scripts/run_exp_Jay.sh b/scripts/run_exp_Jay.sh new file mode 100644 index 0000000..461ef55 --- /dev/null +++ b/scripts/run_exp_Jay.sh @@ -0,0 +1,24 @@ +#!/bin/bash +#SBATCH --time=48:00:00 +#SBATCH -n 8 +#SBATCH -N 1 +#SBATCH --mem=256G +#SBATCH -p gpu --gres=gpu:1 +#SBATCH --account=carney-tserre-condo +#SBATCH -J Jay-ClickMe-Processing +#SBATCH -o logs/log-ClickMe-Processing-%j.out + + +source 
/gpfs/data/tserre/jgopal/human_clickme_data_processing/jay-venv/bin/activate + +python ceiling_floor_estimate.py configs/exp_configs/imagenet_val_oscar_max_5.yaml +python ceiling_floor_estimate.py configs/exp_configs/imagenet_val_oscar_max_10.yaml +python ceiling_floor_estimate.py configs/exp_configs/imagenet_val_oscar_max_15.yaml +python ceiling_floor_estimate.py configs/exp_configs/imagenet_val_oscar_max_20.yaml +python ceiling_floor_estimate.py configs/exp_configs/imagenet_val_oscar_max_25.yaml +python ceiling_floor_estimate.py configs/exp_configs/imagenet_val_oscar_max_30.yaml + +# python ceiling_floor_estimate.py configs/balance_exp_configs/imagenet_val_oscar_max_5.yaml +# python ceiling_floor_estimate.py configs/balance_exp_configs/imagenet_val_oscar_max_10.yaml +# python ceiling_floor_estimate.py configs/balance_exp_configs/imagenet_val_oscar_max_15.yaml +# python ceiling_floor_estimate.py configs/balance_exp_configs/imagenet_val_oscar_max_20.yaml \ No newline at end of file From 00f7a0e01cab0958bf1ac80209e110d9caf6e91d Mon Sep 17 00:00:00 2001 From: Jay Gopal Date: Sun, 5 Oct 2025 16:13:48 -0400 Subject: [PATCH 09/16] Delete logs/log-ClickMe-Processing-13138590.out --- logs/log-ClickMe-Processing-13138590.out | 10 ---------- 1 file changed, 10 deletions(-) delete mode 100644 logs/log-ClickMe-Processing-13138590.out diff --git a/logs/log-ClickMe-Processing-13138590.out b/logs/log-ClickMe-Processing-13138590.out deleted file mode 100644 index 55c3c5d..0000000 --- a/logs/log-ClickMe-Processing-13138590.out +++ /dev/null @@ -1,10 +0,0 @@ -## SLURM PROLOG ############################################################### -## Job ID : 13138590 -## Job Name : Jay-ClickMe-Processing -## Nodelist : gpu2001 -## CPUs : 8 -## Mem/Node : 131072 MB -## Directory : /oscar/data/tserre/jgopal/human_clickme_data_processing_Oct2025/human_clickme_data_processing -## Job Started : Sun Oct 5 15:35:39 EDT 2025 -############################################################################### -slurmstepd: error: *** JOB 13138590 ON gpu2001 CANCELLED AT 2025-10-05T15:44:00 *** From 48272951e84acb9b88fab18e0435c3abdd0f6ece Mon Sep 17 00:00:00 2001 From: Jay Gopal Date: Sun, 5 Oct 2025 16:14:32 -0400 Subject: [PATCH 10/16] gitignore logs --- .gitignore | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.gitignore b/.gitignore index 20d99ee..cbf775d 100644 --- a/.gitignore +++ b/.gitignore @@ -18,4 +18,5 @@ jay_imagenet_train_0.1_dimensions.npy *.png clickme_datasets/ *.csv -temp/ \ No newline at end of file +temp/ +logs/ \ No newline at end of file From 3dd17f2e5aa738039e78216089b6ff211d854a49 Mon Sep 17 00:00:00 2001 From: PPPayson Date: Mon, 6 Oct 2025 01:02:14 -0400 Subject: [PATCH 11/16] Fix max subjects default in prepare --- ceiling_floor_estimate.py | 2 +- .../imgnet_configs/imagenet_val_oscar.yaml | 2 +- .../imagenet_val_spearman_oscar.yaml | 5 +- imgnet_assets/jay_imagenet_val_08_27_2025.h5 | Bin 0 -> 3536 bytes .../jay_imagenet_val_08_27_2025_batch001.h5 | Bin 0 -> 3728 bytes missing_val.json | 3563 +---------------- prepare_clickmaps.py | 18 +- src/utils.py | 2 +- tools/find_missing_images.py | 3 +- tools/find_top_bottom.py | 32 +- 10 files changed, 31 insertions(+), 3596 deletions(-) create mode 100644 imgnet_assets/jay_imagenet_val_08_27_2025.h5 create mode 100644 imgnet_assets/jay_imagenet_val_08_27_2025_batch001.h5 diff --git a/ceiling_floor_estimate.py b/ceiling_floor_estimate.py index 7c8b7cd..c57d672 100644 --- a/ceiling_floor_estimate.py +++ b/ceiling_floor_estimate.py @@ 
-621,7 +621,6 @@ def compute_correlation_batch(batch_indices, all_clickmaps, all_names, metric="a image_paths = clickme_data['image_path'].unique() total_unique_images = len(image_paths) print(f"Found {total_unique_images} unique images") - # Set up GPU configuration if args.gpu_batch_size: config["gpu_batch_size"] = args.gpu_batch_size @@ -810,6 +809,7 @@ def compute_correlation_batch(batch_indices, all_clickmaps, all_names, metric="a # Process correlation batches in parallel if not config['constancy']: + print(f"number of imgs:{len(final_keep_index)}, number of maps:{len(all_clickmaps)}") ceiling_returns = Parallel(n_jobs=n_jobs, prefer="threads")( delayed(compute_correlation_batch)( batch_indices=batch,
diff --git a/configs/imgnet_configs/imagenet_val_oscar.yaml b/configs/imgnet_configs/imagenet_val_oscar.yaml index eab86cb..18998e8 100644 --- a/configs/imgnet_configs/imagenet_val_oscar.yaml +++ b/configs/imgnet_configs/imagenet_val_oscar.yaml @@ -22,7 +22,7 @@ image_shape: mask_dir: null mask_threshold: 0 max_clicks: 1000000 -metadata_file: image_metadata/jay_imagenet_val_04_30_2025_dimensions.npy +metadata_file: image_metadata/jay_filteimagenet_val_04_30_2025_dimensions.npy metric: auc min_clicks: 1 min_subjects: 5
diff --git a/configs/imgnet_configs/imagenet_val_spearman_oscar.yaml b/configs/imgnet_configs/imagenet_val_spearman_oscar.yaml index 0da9449..4182499 100644 --- a/configs/imgnet_configs/imagenet_val_spearman_oscar.yaml +++ b/configs/imgnet_configs/imagenet_val_spearman_oscar.yaml @@ -1,4 +1,4 @@ -assets: assets +assets: imgnet_assets temp_dir: temp/imgnet_val.h5 blur_size: 21 center_crop: @@ -42,4 +42,5 @@ output_format: "hdf5" processed_clickmap_bins: jay_imagenet_val_combined_08_27_2025_clickmap_bins.npy chunk_size: 100000 batch_size: 14000 -save_json: true \ No newline at end of file +save_json: true +filter_duplicates: true \ No newline at end of file
diff --git a/imgnet_assets/jay_imagenet_val_08_27_2025.h5 b/imgnet_assets/jay_imagenet_val_08_27_2025.h5 new file mode 100644 index 0000000000000000000000000000000000000000..a009b25ce85a43f4d317006fca4041d700305cfe GIT binary patch literal 3536 [base85 payload omitted] literal 0 HcmV?d00001
diff --git a/imgnet_assets/jay_imagenet_val_08_27_2025_batch001.h5 b/imgnet_assets/jay_imagenet_val_08_27_2025_batch001.h5 new file mode 100644 index 0000000000000000000000000000000000000000..25e6e3bcd420dd2f5d637798cb8cd6a5df8e8434 GIT binary patch literal 3728 [base85 payload omitted]
Date: Mon, 6 Oct 2025 01:03:11 -0400 Subject: [PATCH 12/16] Fix max subjects default in prepare --- .../imagenet_val_spearman_oscar.yaml | 2 +- imgnet_assets/jay_imagenet_val_08_27_2025.h5 | Bin 3536 -> 0 bytes .../jay_imagenet_val_08_27_2025_batch001.h5 | Bin 3728 -> 0 bytes 3 files changed, 1 insertion(+), 1 deletion(-) delete mode 100644
imgnet_assets/jay_imagenet_val_08_27_2025.h5 delete mode 100644 imgnet_assets/jay_imagenet_val_08_27_2025_batch001.h5
diff --git a/configs/imgnet_configs/imagenet_val_spearman_oscar.yaml b/configs/imgnet_configs/imagenet_val_spearman_oscar.yaml index 4182499..6471b2e 100644 --- a/configs/imgnet_configs/imagenet_val_spearman_oscar.yaml +++ b/configs/imgnet_configs/imagenet_val_spearman_oscar.yaml @@ -1,4 +1,4 @@ -assets: imgnet_assets +assets: assets/imgnet_val temp_dir: temp/imgnet_val.h5 blur_size: 21 center_crop:
diff --git a/imgnet_assets/jay_imagenet_val_08_27_2025.h5 b/imgnet_assets/jay_imagenet_val_08_27_2025.h5 deleted file mode 100644 index a009b25ce85a43f4d317006fca4041d700305cfe..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 3536 [base85 payload omitted]
diff --git a/imgnet_assets/jay_imagenet_val_08_27_2025_batch001.h5 b/imgnet_assets/jay_imagenet_val_08_27_2025_batch001.h5 deleted file mode 100644 index 25e6e3bcd420dd2f5d637798cb8cd6a5df8e8434..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 3728 [base85 payload omitted]
Date: Mon, 6 Oct 2025 14:37:27 -0400 Subject: [PATCH 13/16] Fix min subject filter --- src/utils.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-)
diff --git a/src/utils.py b/src/utils.py index 1152c38..6f6b763 100644 --- a/src/utils.py +++ b/src/utils.py @@ -1712,7 +1712,7 @@ def process_all_maps_multi_thresh_gpu( # Preprocess all clickmaps first to binary maps for clickmap_idx, (key, trials) in tqdm(enumerate(clickmaps.items()), "Pre-processing on CPU"): if len(trials) < min_subjects: - # print("Not enough subjects", key, len(trials)) + print("Not enough subjects", key, len(trials)) continue if time_based_bins: lens = [len(x) for x in trials] @@ -1740,11 +1740,13 @@ def process_all_maps_multi_thresh_gpu( # org_number = binary_maps.sum((-2, -1)) binary_maps = binary_maps[mask] # If we have enough valid maps, average them and keep this image - # if len(binary_maps) >= min_subjects: - if average_maps: - bin_clickmaps.append(np.array(binary_maps).mean(0, keepdims=True)) + if len(binary_maps) >= min_subjects: + if average_maps: + bin_clickmaps.append(np.array(binary_maps).mean(0, keepdims=True)) + else: + bin_clickmaps.append(np.array(binary_maps)) else: - bin_clickmaps.append(np.array(binary_maps)) + print("Not enough subjects", key, len(binary_maps)) else: # Get max count then do thresholds from that @@ -1801,8 +1803,6 @@ def process_all_maps_multi_thresh_gpu( # continue # Only add to tracking structures if we can successfully process this image - if len(bin_clickmaps[0]) < min_subjects: - continue categories.append(key.split("/")[0]) keep_index.append(key) final_clickmaps[key] = trials
From d28edaf0a73b4acac90bd4dc0da57686664dda36 Mon Sep 17 00:00:00 2001 From:
PPPayson Date: Mon, 6 Oct 2025 15:39:35 -0400 Subject: [PATCH 14/16] remove redundant print --- .../balance_exp_configs/imagenet_val_oscar_max_10.yaml | 2 +- .../balance_exp_configs/imagenet_val_oscar_max_15.yaml | 2 +- .../balance_exp_configs/imagenet_val_oscar_max_20.yaml | 2 +- .../balance_exp_configs/imagenet_val_oscar_max_5.yaml | 2 +- configs/co3d_configs/co3d_train.yaml | 2 +- configs/co3d_configs/co3d_train_oscar.yaml | 2 +- configs/co3d_configs/co3d_val.yaml | 2 +- configs/co3d_configs/co3d_val_auc_oscar.yaml | 2 +- configs/co3d_configs/co3d_val_spearman_oscar.yaml | 2 +- .../co3d_constancy_val_oscar.yaml | 2 +- .../co3d_constancy_configs/constancy_ceiling_auc.yaml | 2 +- .../constancy_ceiling_spearman.yaml | 2 +- configs/exp_configs/imagenet_val_oscar_max_10.yaml | 2 +- configs/exp_configs/imagenet_val_oscar_max_15.yaml | 2 +- configs/exp_configs/imagenet_val_oscar_max_20.yaml | 2 +- configs/exp_configs/imagenet_val_oscar_max_25.yaml | 2 +- configs/exp_configs/imagenet_val_oscar_max_30.yaml | 2 +- configs/exp_configs/imagenet_val_oscar_max_5.yaml | 2 +- configs/imgnet_configs/imagenet_co3d_val_oscar.yaml | 2 +- configs/imgnet_configs/imagenet_train_oscar.yaml | 2 +- configs/imgnet_configs/imagenet_val_oscar.yaml | 2 +- src/utils.py | 10 +++++----- 22 files changed, 26 insertions(+), 26 deletions(-) diff --git a/configs/balance_exp_configs/imagenet_val_oscar_max_10.yaml b/configs/balance_exp_configs/imagenet_val_oscar_max_10.yaml index f64073b..103f6b4 100644 --- a/configs/balance_exp_configs/imagenet_val_oscar_max_10.yaml +++ b/configs/balance_exp_configs/imagenet_val_oscar_max_10.yaml @@ -1,4 +1,4 @@ -assets: assets +assets: assets/exp temp_dir: temp blur_size: 21 center_crop: diff --git a/configs/balance_exp_configs/imagenet_val_oscar_max_15.yaml b/configs/balance_exp_configs/imagenet_val_oscar_max_15.yaml index af506b2..78769b8 100644 --- a/configs/balance_exp_configs/imagenet_val_oscar_max_15.yaml +++ b/configs/balance_exp_configs/imagenet_val_oscar_max_15.yaml @@ -1,4 +1,4 @@ -assets: assets +assets: assets/exp temp_dir: temp blur_size: 21 center_crop: diff --git a/configs/balance_exp_configs/imagenet_val_oscar_max_20.yaml b/configs/balance_exp_configs/imagenet_val_oscar_max_20.yaml index 8de7dd0..3cd0703 100644 --- a/configs/balance_exp_configs/imagenet_val_oscar_max_20.yaml +++ b/configs/balance_exp_configs/imagenet_val_oscar_max_20.yaml @@ -1,4 +1,4 @@ -assets: assets +assets: assets/exp temp_dir: temp blur_size: 21 center_crop: diff --git a/configs/balance_exp_configs/imagenet_val_oscar_max_5.yaml b/configs/balance_exp_configs/imagenet_val_oscar_max_5.yaml index b1963b4..144fe95 100644 --- a/configs/balance_exp_configs/imagenet_val_oscar_max_5.yaml +++ b/configs/balance_exp_configs/imagenet_val_oscar_max_5.yaml @@ -1,4 +1,4 @@ -assets: assets +assets: assets/exp temp_dir: temp blur_size: 21 center_crop: diff --git a/configs/co3d_configs/co3d_train.yaml b/configs/co3d_configs/co3d_train.yaml index 2cd19f9..7403a11 100644 --- a/configs/co3d_configs/co3d_train.yaml +++ b/configs/co3d_configs/co3d_train.yaml @@ -1,4 +1,4 @@ -assets: assets +assets: assets/co3d blur_size: 21 center_crop: - 224 diff --git a/configs/co3d_configs/co3d_train_oscar.yaml b/configs/co3d_configs/co3d_train_oscar.yaml index a0f2742..22d3e92 100644 --- a/configs/co3d_configs/co3d_train_oscar.yaml +++ b/configs/co3d_configs/co3d_train_oscar.yaml @@ -1,4 +1,4 @@ -assets: assets +assets: assets/co3d blur_size: 21 center_crop: - 224 diff --git a/configs/co3d_configs/co3d_val.yaml 
b/configs/co3d_configs/co3d_val.yaml index ccf8ef2..c11d0b1 100644 --- a/configs/co3d_configs/co3d_val.yaml +++ b/configs/co3d_configs/co3d_val.yaml @@ -1,4 +1,4 @@ -assets: assets +assets: assets/co3d blur_size: 21 center_crop: - 224 diff --git a/configs/co3d_configs/co3d_val_auc_oscar.yaml b/configs/co3d_configs/co3d_val_auc_oscar.yaml index ccd5d2f..1243ad2 100644 --- a/configs/co3d_configs/co3d_val_auc_oscar.yaml +++ b/configs/co3d_configs/co3d_val_auc_oscar.yaml @@ -1,4 +1,4 @@ -assets: assets +assets: assets/co3d blur_size: 21 center_crop: - 224 diff --git a/configs/co3d_configs/co3d_val_spearman_oscar.yaml b/configs/co3d_configs/co3d_val_spearman_oscar.yaml index 498580e..1d29b1a 100644 --- a/configs/co3d_configs/co3d_val_spearman_oscar.yaml +++ b/configs/co3d_configs/co3d_val_spearman_oscar.yaml @@ -1,4 +1,4 @@ -assets: assets +assets: assets/co3d blur_size: 21 center_crop: - 224 diff --git a/configs/co3d_constancy_configs/co3d_constancy_val_oscar.yaml b/configs/co3d_constancy_configs/co3d_constancy_val_oscar.yaml index 04e2503..85cb30a 100644 --- a/configs/co3d_constancy_configs/co3d_constancy_val_oscar.yaml +++ b/configs/co3d_constancy_configs/co3d_constancy_val_oscar.yaml @@ -1,4 +1,4 @@ -assets: assets +assets: assets/co3d_constancy blur_size: 21 center_crop: - 224 diff --git a/configs/co3d_constancy_configs/constancy_ceiling_auc.yaml b/configs/co3d_constancy_configs/constancy_ceiling_auc.yaml index 4040c12..a0a70b9 100644 --- a/configs/co3d_constancy_configs/constancy_ceiling_auc.yaml +++ b/configs/co3d_constancy_configs/constancy_ceiling_auc.yaml @@ -1,4 +1,4 @@ -assets: assets +assets: assets/co3d_constancy blur_size: 21 center_crop: - 224 diff --git a/configs/co3d_constancy_configs/constancy_ceiling_spearman.yaml b/configs/co3d_constancy_configs/constancy_ceiling_spearman.yaml index 6ab3bf2..c3916a1 100644 --- a/configs/co3d_constancy_configs/constancy_ceiling_spearman.yaml +++ b/configs/co3d_constancy_configs/constancy_ceiling_spearman.yaml @@ -1,4 +1,4 @@ -assets: assets +assets: assets/co3d_constancy blur_size: 21 center_crop: - 224 diff --git a/configs/exp_configs/imagenet_val_oscar_max_10.yaml b/configs/exp_configs/imagenet_val_oscar_max_10.yaml index 4d7818b..4ab7659 100644 --- a/configs/exp_configs/imagenet_val_oscar_max_10.yaml +++ b/configs/exp_configs/imagenet_val_oscar_max_10.yaml @@ -1,4 +1,4 @@ -assets: assets +assets: assets/exp temp_dir: temp blur_size: 21 center_crop: diff --git a/configs/exp_configs/imagenet_val_oscar_max_15.yaml b/configs/exp_configs/imagenet_val_oscar_max_15.yaml index 58ca953..24d0469 100644 --- a/configs/exp_configs/imagenet_val_oscar_max_15.yaml +++ b/configs/exp_configs/imagenet_val_oscar_max_15.yaml @@ -1,4 +1,4 @@ -assets: assets +assets: assets/exp temp_dir: temp blur_size: 21 center_crop: diff --git a/configs/exp_configs/imagenet_val_oscar_max_20.yaml b/configs/exp_configs/imagenet_val_oscar_max_20.yaml index 5e6b947..741048c 100644 --- a/configs/exp_configs/imagenet_val_oscar_max_20.yaml +++ b/configs/exp_configs/imagenet_val_oscar_max_20.yaml @@ -1,4 +1,4 @@ -assets: assets +assets: assets/exp temp_dir: temp blur_size: 21 center_crop: diff --git a/configs/exp_configs/imagenet_val_oscar_max_25.yaml b/configs/exp_configs/imagenet_val_oscar_max_25.yaml index 2c7dc91..33af46f 100644 --- a/configs/exp_configs/imagenet_val_oscar_max_25.yaml +++ b/configs/exp_configs/imagenet_val_oscar_max_25.yaml @@ -1,4 +1,4 @@ -assets: assets +assets: assets/exp temp_dir: temp blur_size: 21 center_crop: diff --git 
a/configs/exp_configs/imagenet_val_oscar_max_30.yaml b/configs/exp_configs/imagenet_val_oscar_max_30.yaml index 9b683e9..df4e648 100644 --- a/configs/exp_configs/imagenet_val_oscar_max_30.yaml +++ b/configs/exp_configs/imagenet_val_oscar_max_30.yaml @@ -1,4 +1,4 @@ -assets: assets +assets: assets/exp temp_dir: temp blur_size: 21 center_crop: diff --git a/configs/exp_configs/imagenet_val_oscar_max_5.yaml b/configs/exp_configs/imagenet_val_oscar_max_5.yaml index 74ab811..5b1b9df 100644 --- a/configs/exp_configs/imagenet_val_oscar_max_5.yaml +++ b/configs/exp_configs/imagenet_val_oscar_max_5.yaml @@ -1,4 +1,4 @@ -assets: assets +assets: assets/exp temp_dir: temp blur_size: 21 center_crop: diff --git a/configs/imgnet_configs/imagenet_co3d_val_oscar.yaml b/configs/imgnet_configs/imagenet_co3d_val_oscar.yaml index e5681f2..9837d19 100644 --- a/configs/imgnet_configs/imagenet_co3d_val_oscar.yaml +++ b/configs/imgnet_configs/imagenet_co3d_val_oscar.yaml @@ -1,4 +1,4 @@ -assets: assets +assets: assets/co3d blur_size: 21 center_crop: - 224 diff --git a/configs/imgnet_configs/imagenet_train_oscar.yaml b/configs/imgnet_configs/imagenet_train_oscar.yaml index e796760..7aa40a5 100644 --- a/configs/imgnet_configs/imagenet_train_oscar.yaml +++ b/configs/imgnet_configs/imagenet_train_oscar.yaml @@ -1,4 +1,4 @@ -assets: assets +assets: assets/imgnet_train blur_size: 21 center_crop: - 224 diff --git a/configs/imgnet_configs/imagenet_val_oscar.yaml b/configs/imgnet_configs/imagenet_val_oscar.yaml index 18998e8..750f39e 100644 --- a/configs/imgnet_configs/imagenet_val_oscar.yaml +++ b/configs/imgnet_configs/imagenet_val_oscar.yaml @@ -1,4 +1,4 @@ -assets: assets +assets: assets/imgnet_val temp_dir: temp blur_size: 21 center_crop: diff --git a/src/utils.py b/src/utils.py index 6f6b763..a146522 100644 --- a/src/utils.py +++ b/src/utils.py @@ -1712,7 +1712,7 @@ def process_all_maps_multi_thresh_gpu( # Preprocess all clickmaps first to binary maps for clickmap_idx, (key, trials) in tqdm(enumerate(clickmaps.items()), "Pre-processing on CPU"): if len(trials) < min_subjects: - print("Not enough subjects", key, len(trials)) + # print("Not enough subjects", key, len(trials)) continue if time_based_bins: lens = [len(x) for x in trials] @@ -1745,8 +1745,8 @@ def process_all_maps_multi_thresh_gpu( bin_clickmaps.append(np.array(binary_maps).mean(0, keepdims=True)) else: bin_clickmaps.append(np.array(binary_maps)) - else: - print("Not enough subjects", key, len(binary_maps)) + # else: + # print("Not enough subjects", key, len(binary_maps)) else: # Get max count then do thresholds from that @@ -1783,8 +1783,8 @@ def process_all_maps_multi_thresh_gpu( bin_clickmaps.append(np.array(binary_maps).mean(0, keepdims=True)) else: bin_clickmaps.append(np.array(binary_maps)) - else: - print("Not enough subjects", key, len(binary_maps)) + # else: + # print("Not enough subjects", key, len(binary_maps)) # Skip if we don't have any valid bin_clickmaps if not bin_clickmaps: From 6e5d18575ec19feac1d6e0c616bebf8076481c5f Mon Sep 17 00:00:00 2001 From: PPPayson Date: Wed, 8 Oct 2025 14:13:54 -0400 Subject: [PATCH 15/16] Change from list to dictionary in processing --- ceiling_floor_estimate.py | 13 +-- ceiling_floor_estimate_large.py | 8 +- configs/debug/bottom_10.yaml | 2 +- .../ILSVRC2012_val_00005143.JPEG | Bin 17583 -> 0 bytes .../ILSVRC2012_val_00007012.JPEG | Bin 18094 -> 0 bytes .../ILSVRC2012_val_00011048.JPEG | Bin 21248 -> 0 bytes .../ILSVRC2012_val_00014660.JPEG | Bin 19713 -> 0 bytes .../ILSVRC2012_val_00017247.JPEG | Bin 19597 -> 
0 bytes .../ILSVRC2012_val_00022850.JPEG | Bin 15641 -> 0 bytes .../ILSVRC2012_val_00027083.JPEG | Bin 14212 -> 0 bytes .../ILSVRC2012_val_00029849.JPEG | Bin 25325 -> 0 bytes .../ILSVRC2012_val_00031840.JPEG | Bin 21394 -> 0 bytes .../ILSVRC2012_val_00032692.JPEG | Bin 17266 -> 0 bytes .../ILSVRC2012_val_00039433.JPEG | Bin 15079 -> 0 bytes .../ILSVRC2012_val_00042007.JPEG | Bin 17297 -> 0 bytes prepare_clickmaps.py | 4 +- src/utils.py | 81 ++++++++---------- tools/find_top_bottom.py | 71 ++++++++------- tools/sample_clickmaps.py | 2 + tools/validate_size.py | 26 ++++++ 20 files changed, 117 insertions(+), 90 deletions(-) delete mode 100644 jay_imagenet_for_co3d_val_0.1_images/ILSVRC2012_val_00005143.JPEG delete mode 100644 jay_imagenet_for_co3d_val_0.1_images/ILSVRC2012_val_00007012.JPEG delete mode 100644 jay_imagenet_for_co3d_val_0.1_images/ILSVRC2012_val_00011048.JPEG delete mode 100644 jay_imagenet_for_co3d_val_0.1_images/ILSVRC2012_val_00014660.JPEG delete mode 100644 jay_imagenet_for_co3d_val_0.1_images/ILSVRC2012_val_00017247.JPEG delete mode 100644 jay_imagenet_for_co3d_val_0.1_images/ILSVRC2012_val_00022850.JPEG delete mode 100644 jay_imagenet_for_co3d_val_0.1_images/ILSVRC2012_val_00027083.JPEG delete mode 100644 jay_imagenet_for_co3d_val_0.1_images/ILSVRC2012_val_00029849.JPEG delete mode 100644 jay_imagenet_for_co3d_val_0.1_images/ILSVRC2012_val_00031840.JPEG delete mode 100644 jay_imagenet_for_co3d_val_0.1_images/ILSVRC2012_val_00032692.JPEG delete mode 100644 jay_imagenet_for_co3d_val_0.1_images/ILSVRC2012_val_00039433.JPEG delete mode 100644 jay_imagenet_for_co3d_val_0.1_images/ILSVRC2012_val_00042007.JPEG create mode 100644 tools/validate_size.py diff --git a/ceiling_floor_estimate.py b/ceiling_floor_estimate.py index c57d672..eaaf3f6 100644 --- a/ceiling_floor_estimate.py +++ b/ceiling_floor_estimate.py @@ -421,8 +421,8 @@ def compute_correlation_batch(batch_indices, all_clickmaps, all_names, metric="a max_kernel_size = config.get("max_kernel_size", 51) blur_sigma_function = config.get("blur_sigma_function", lambda x: x) for i in tqdm(batch_indices, desc="Computing split-half correlations", total=len(batch_indices)): - clickmaps = all_clickmaps[i] img_name = all_names[i] + clickmaps = all_clickmaps[img_name] level_corrs = [] #TODO modify for speed up @@ -443,8 +443,9 @@ def compute_correlation_batch(batch_indices, all_clickmaps, all_names, metric="a rand_i = np.random.randint(len(all_clickmaps) - 1) if rand_i >= i: rand_i += 1 - rand_clickmaps = all_clickmaps[rand_i] rand_name = all_names[rand_i] + rand_clickmaps = all_clickmaps[rand_name] + if metadata and rand_name in metadata: native_size = metadata[rand_name] short_side = min(native_size) @@ -656,11 +657,11 @@ def compute_correlation_batch(batch_indices, all_clickmaps, all_names, metric="a image_output_dir = config["example_image_output_dir"] os.makedirs(output_dir, exist_ok=True) os.makedirs(image_output_dir, exist_ok=True) - os.makedirs(os.path.join(output_dir, config["experiment_name"]), exist_ok=True) + # os.makedirs(os.path.join(output_dir, config["experiment_name"]), exist_ok=True) # Create dedicated directory for click counts - click_counts_dir = os.path.join(output_dir, f"{config['experiment_name']}_click_counts") - os.makedirs(click_counts_dir, exist_ok=True) + # click_counts_dir = os.path.join(output_dir, f"{config['experiment_name']}_click_counts") + # os.makedirs(click_counts_dir, exist_ok=True) # Original code for non-HDF5 format hdf5_path = os.path.join(output_dir, 
f"{config['experiment_name']}_ceiling_metadata.h5") @@ -896,7 +897,7 @@ def compute_correlation_batch(batch_indices, all_clickmaps, all_names, metric="a all_data = {} for i, name in enumerate(all_names): - clickmap = all_clickmaps[i] + clickmap = all_clickmaps[name] img_idx = int(name.split('.')[0].split('_')[-1]) zoom_level = img_idx % 3 depth_path = os.path.join(depth_root, f"depth_{name.replace('.png', '.npy')}") diff --git a/ceiling_floor_estimate_large.py b/ceiling_floor_estimate_large.py index 890e077..d4c1041 100644 --- a/ceiling_floor_estimate_large.py +++ b/ceiling_floor_estimate_large.py @@ -151,9 +151,8 @@ def compute_correlation_batch(batch_indices, all_clickmaps, all_names, metric="a max_kernel_size = config.get("max_kernel_size", 51) blur_sigma_function = config.get("blur_sigma_function", lambda x: x) for i in tqdm(batch_indices, desc="Computing split-half correlations", total=len(batch_indices)): - hd5_name = f"clickmap_{str(i).zfill(8)}" - clickmaps = all_clickmaps[hd5_name] img_name = all_names[i] + clickmaps = all_clickmaps[img_name] level_corrs = [] if metadata and img_name in metadata: native_size = metadata[img_name] @@ -174,8 +173,7 @@ def compute_correlation_batch(batch_indices, all_clickmaps, all_names, metric="a if rand_i >= i: rand_i += 1 rand_name = all_names[rand_i] - rand_hd5_name = f"clickmap_{str(rand_i).zfill(8)}" - random_map = all_clickmaps[rand_hd5_name] + random_map = all_clickmaps[rand_name] if metadata and rand_name in metadata: native_size = metadata[rand_name] short_side = min(native_size) @@ -549,7 +547,7 @@ def compute_correlation_batch(batch_indices, all_clickmaps, all_names, metric="a # print(f"Reducing parallel jobs from {n_jobs} to {adjusted_n_jobs} to prevent 'too many files open' error") # n_jobs = adjusted_n_jobs - Process correlation batches in parallel + # Process correlation batches in parallel ceiling_returns = Parallel(n_jobs=n_jobs, prefer="threads")( delayed(compute_correlation_batch)( batch_indices=batch, diff --git a/configs/debug/bottom_10.yaml b/configs/debug/bottom_10.yaml index b122784..c475f45 100644 --- a/configs/debug/bottom_10.yaml +++ b/configs/debug/bottom_10.yaml @@ -1,4 +1,4 @@ -assets: assets +assets: assets/debug temp_dir: temp blur_size: 21 center_crop: diff --git a/jay_imagenet_for_co3d_val_0.1_images/ILSVRC2012_val_00005143.JPEG b/jay_imagenet_for_co3d_val_0.1_images/ILSVRC2012_val_00005143.JPEG deleted file mode 100644 index d9000e7d14fbf34c6d130e0da9055681100a8f54..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 17583 zcmeHt2Q-}R+U~2Dh!zr^1R;8~=p{Nqw9zHH(PETfB5DK^j2^wW!4RDwLiFByCrb3* zIg@Yi|3CZx*1y*|XRm$MS?jECX71&g*YiH}+|S%|m+QW6CvF!2Vr6+Hc>oO!4X{8x zf!k?72EfI}#=*wI#lgYB!^6cVAiGO&=MDiS={+JcS}J-vS}GbE1|}{RhDRKXG&HQw zo^bFy<>%+8XAu?`;uYiKE;~io16ycpuU{+FVhlm|Iv{S=)eZU0mJXx_fv+13m8Hizx6^xcSi{Z2`1LVr`V*@8aSqqdyjbi zaLJxWW|TJJG4g5dk()UW;@@ZDUu53@t=b=&{qHH}|1W9wpNjoYy`}&H3^df2hd}~J z0;d6S8Q<$U>o$c zlfX$D)mtDOzL%dpQWB)pxZipU;0NT;LI*IGM&E>{^It*jU+p2K-Gwy{50A)UWO&+e zgJBvpsry(nv-3w}jkcdJZ-MlJwbXY*16YwR+^3DwY7a<#-W`$aQ91GZMyVtODO_Xm zQX}R?)5I?D!!EQK*5?g?8b{`v({bZMLOI-{=0G1Fz`5z6A!fwYAV?P&>ER&f z0$(iqZ(z2NSwhjQHSRMbI#JYT?A|C#-vTWfx4@l%cAq2KTcA@>@(i}PUSc>YQzou% zP1vy`x#;AvdlS%j{siY1AW^vDB0^0YaK8nZ8E%0k_Ml9-C1HnnvM6KCEfCnW+;~BF z3#??*U&+z$)rg^{NzY%!1l$6_#dI+77*U23r>*ein+G`Ix4=4T3sDh1`(!@N3;Xle zI6eoOL^qFWpsXh^w*n@Al!?OsSKG`&?ULxPP5wVQ$*q0@6ZQNvb@_&`(LFUarT*Yo zfaBXRB_SF@r4571poPYB5;5^x;C1(#YqH8(lNZ(2pE)N`7W;urLk{?70?BX7lhHA^ 
[remaining base85 payload of the deleted 17583-byte JPEG omitted]
diff --git a/jay_imagenet_for_co3d_val_0.1_images/ILSVRC2012_val_00007012.JPEG b/jay_imagenet_for_co3d_val_0.1_images/ILSVRC2012_val_00007012.JPEG deleted file mode 100644 index 54e554a06d2c9023c44bdf9982fa315be95e156a..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 18094 [base85 payload omitted]
z*~lgg(JJQ+tJDzHljuxQ>FJ;nXGmz>SPdtUzB!(R*r{S=ylsblMz*u<+zS(Jw zW-z~u-N@l@FA;lBzrv%nQf+NSWXQugp#PqADv>XnP8_1yo>wR${pbA_ztZ>NodOMS z{Xu4rP@u^O1==2fP@q2W_TT=u&?sb<0gbu=lwII1!0RJq}_~fRjZcbm*11G zN+m@)OmQwll4J(;ku@z08#Wx&R!*dHRxxTDiq`1$aNRD%1E-MMW@0STDl_8;mDbFCRy|R&efG^U@FaaT|s5YLV57{z37hjXp)zShHNfhZe5aL z?Cu^IR?qfKg-68%nm4X`(NxvFIC0bKe8jOv6ed=wOK3n>Rj=t4osBp4rrc$|CxB(E zbl<;6>v=-H?s^;^?`gUJnCn^J7wFuFHUfe#y{~b&_aSIwxD<|9qmM!C3L7w?L|Ur z@1RI*@8!y;7f2TazwkBWO%yE08CR5}F0HFU&Lg+LX$k6mZ62?$&ReY9C$$Zed-uQo zG&Jt79H0$AVng@OTvp*R5QjmuGKiBI@hJ*lnvH~vj6sWFdK^i42H;)R{<@J;X$A3P zG}136vMF!)IuRzmWEk{LePrQAf-a6@58+m&+IILCN57qolinHUe`{CjtpDEd)bgiC ze0<$?Xly^KMpxK$V$KlW7O!(&LiPGR}vcF_XN$IR)|;g$@sq zllF(%wKqRNs+5$YBgrAs9$HhoS{{Oh9qWBMWk^BlUCmlUV^xWwkm}}Aae)~-jj12K zQRMABrW2#8F{eoeinfF6AH!U91()WEm^9r@9V zh)MqlWj#4MC9--q>?Y?9O|VwUe1{*}-UNILy^aTSmwmOJ`N~-*P071n zGPybN1D2LPi+Rd6BNv=&o;1$!TQt zX4!la`2Kb-Q*gJN6JoC;baGkIfPvw%=1$d0zQfM(%F#kUJ6WJ_ha$5lcLFxHthevD z7G(M+J=Kd$6u&2|hKBe3B%`o{tGhy0M<~?(e5gO3I#3w3T<8-+Y&v zu>Q!>d|c?Y4sQjSvaQgVg5#&*Q~rxVR}?Kdb0;e zHJfGPlcf7WKT8XMO%xK1UTwT=;{Z>5++7`On>FmiN}i0G5iDnN!>%z_Q%*tg&q-~z zug@PoJW3?i!0ig0GtFqhbw97hvVYCR8r$q=M>68IH=Z~HCE|74<8V)ceMe4`6;$&R zym2<3jQWx6Zyq1EgLz5@uMM(!-M3o4HEnvsvR7JmlitwRgYKhY`X+@rburXzWKw$8 zVrDK`tXL_e;q7#s!mfJBgS9|)Z}O+~3m#lzVQAP-aSM%6RcJA=E?h9Slmf;<~GW+ibabT zcInkIhpnwWK4%%8(X7O{#_7Cpx_GV&9$PLcoUO&ry246MSH!Gsj!`4RiDOTzll zs;s^9Q@;f9i=X`ajtGuch+0}>tdv5#bnnwdg-ifaH?9iN4o>DbxsB2gL)@=IWe2zq z{a*Wt<;IIHZ_t>HUn3xDFF~G6(!)%3-IWRIVfAtT7;M@0$5$Spc)nCQ#}9H!E?M%a z`{+`I?yO}R2Xc*XO8R1J>XzF&QJ^PjHrWkz(%QzP$OSW*Pcl)y4-xi=Thg86bdgqo z&%RK~7oxHV)Pv&6Sy$W#iNLS*D14i`Ud7B2+{nDHb4hh;@|#Nt6j<(d3LroR;TrYL zRSgg^U2&CpJUbQC06EVdx&6zg5#U>g4gJ}kI{pEM-L-=4wdmzulwWJTn>JwFh_$jl^Ea^)K+ zMO()sT`}w+an(~@Urg0o?`V)brMCQ889jC#)iqEBhk!o1(Lm$5us{F({;p#2SLw%B zn%^;5B721BcGcdmC=efx;2-bFqaU=hXT~{+*Rpvodsmo^kMu2XdyWCo=W<10!t}%T35~DBnv1 zfe_(ea!cB&LL?uouCYfLS@l&6;5!o{xewy!tshz@1dK*{BM3l4s138%e zGoD&ii>@A@(lGZ+JHnstWtY7Jdb-V4p_TdQx;&}~hay5`wKDqt92h`4#;h>U0TW-`QDZ|{tqf7IUGIY|;F30k}|yIa_d!CbZk9}j-a3ZW!~7sBO^KPr1~ zHfvgO@`S7j(t^K>6iZyrZ*YHs8>ycupu@h_Af|W=U|ly{v(_2>JixHcoc8Chx1X+_kb4IY z%n1Ma%HMr&X(YlLpI61gnUDLyY)chJrO?&ua=Km%IDK{Z`i8af+Go8`($no*VE3c! 
z#KZoneE|-w51of7*SJofnbbueYD#djq`ZqCaYH%IuOryxZrXnoi0340YxIMDGpOyn zxOY8%&OyYmai=Br^0cVl0mRe?5}5EPVt$ZpBx^Az1t$bQZHx^Q2{+O$ofbGL;c3qe1y07uC8<%4tQgdAjFRv%hkcpPrZY^M?8`PA&T&gJ4LWS7@v( zl~^GPXs^ENJ_#x~qpY8+ptQjAE%I#O(sA3#wJmI~w%H?!hHX@cEqbf9 zP%sZ!^zeYbYV8cQev4cEfMZ%-FCN}!6ks63(o_+M2T@V1;zANYoBh&_Om_r}?^LLL zO%Oeoe&uSi`*X(Xefvl*WeO}_0^xQx__-ZQte;zs4?ZVBu-&xOrCvF z)65lgmL^Uboz(l%R`y0Dp|J_eTj!?fC0NT?_5C2&rGG`SR#82~z@d>udf9sv-6HhE zeAGZ#x7Om2s+(=h0klRu_?_R_PC7*91vHJIuV~Z-oXCD}DzdN4%?M^?@KN#GTi@DH z9p5u?|3leram{Ym`Y&I~iNHWwNSk`%F)~p} z#rhiQ2+~{iJgLb+FvozdZO}?a_&UFW^IMr){k@}a68c{aBLa2TqQwb7d?Gj3+$Nz1V&6kU9M8Fu}lK{s$Jc8N|q&;Jj~Nw zXwHvf3p}g_xMWzfWr(DCu_OeFeL%LxE{m4QV?~{Sl*Asg)D5@#_c@~DN662k!gDpq zDd{oR;ndihmuiFE&jNirPYZ@@%Lt%zW*D3VI~l7JT38Gli-}2;W5`;G17<9zVluXd zMj-^zX)HI<<`{FzA>ymGVn+FTL?PE(DDrY^m7RoGInQQvi*7bSZ;NTS!-(jRdwO2fVeoREh@d=%}JTJoTmyNVzVt))=QTrqu}pFmJ~rl|oUb>MiOEg1{967Y&55!-y|X-bqeOmr>e_&| z7IOJ*>@#JRy+Eh8ds;a)s~e6O1(`${kA6NlEM0{Q;T*p+bfZSl;c2(Pql-Hkp4S?e zSOYyopkl+~f63I9A=a((Zbt;4(P3}WA=|GOAL-KA{4&DcF173ov3g)Sm zUH9bAN{4BFGQqXLC_0`78^B^-|5$f{FTl=u`-x(~JS>t6#z%jkth4cJmaRKIZA_T`#48ht<)) zf_!NqLx0PT!f8|`bSXEiTvTAh|5PDUwhn5y<=fa^ zOuYGEy=(q<9_Z*RZ+xKNbNprF)4^?fN46ttOB>_5sC1-^q~pxDryGawBlp&W>1%nb z40@=QV_O3C5aB6ZnbBFp33yLdO1yGn{C6?l>5R-wn8qZZ+sG-inf9#X6J_h!@t@n5 zx}Ubh3F?0?L^?Z^Rv8%yC4-Sy8A-8uzhb<)g08yn(JtIr)-`Gdx|9mC_{k`dZD8jS z76QPkc+Gd1p^t}gQ{1M|CP_izR&)-krqft2D^(ujP^RKQ!iu2W0WwoNY23c+1s)og zn9uV3{Ln4XX{B%^Q`;#=M7-NZ=x3wpOA^(=TOj;2|BV+sM|sL2Ey#0o;q%fOXstHckkA zADKUkZvi_n-UNu#S@J=j#-ads#=8s`=_)iKjD}l4Vv|I#aazf5&{;XU@&P)JCunmQ zio6C{FbwPsZcE%V4$IF7=xoxL+*s6f2gyc(Zju|azQVcD@?CM_IxhtWS?0$MhNfHNM2&#w-$CELK%~Xw?jLwK5^+x3ooq8=-+>STG;y+^i zcSMP_R1Ky-#gqOdP$d(GJ&VJ%wvIOBKyJ7^ugdbjatd< zt`U7QC$WqC8k!W;248zZ{PBYt6^ah0Ocv5E>3y?N$=-haV~HfE^Z2)ss_r7HZkGd< zh!laIlTL@v^5SNsf~zDfC39as>CoeSeuu;E&Pl6s^km&dM}FE#>zpLhN#|aIx!-62 z76#0uv08^>#9~6n_0?i+nA!Fuqpca&-ZYfUmBUBbh6 z-Ic#r#9}1tx%5+NlRxD!%`h%JRD!2cOh!>V*KhO)gI+VwtuYdeS}DOPh70H%yCp>MLcfv3VXGjyIJ7YoL?7)R!i?^{Ep6B1w_@9wH%&I_t9 zwkSie(9{~KT*u;YW1i4O)r+>;4~srme`%72`0|1M^|RMePmAy z6*}bJ$RC~Gu11@Mj<%$dbBD|{mWV_l4H^2Re>7Mr3U862TP|77E9O(1aB?;39@KX= zCYg0WugaiRRosV+Tp6|e64dKE657^$`buHjfYm;6tnBG4r0wUu?#dbvikFVTg5}Bm zco-y^j+m%Ali6C`uI3CAZsIR^yBfEmiEGd~{8(9b<{U(I>SsnRdhB>MoVuXL6<-T@ zUma!5B&_q2Pg`ZwE#bX8i?f1SqqKPICW>6{)S4y80>wm>+C|o=?!`pns|M}j1^y2R zTbsEA_?aj|c}OG4MuI0(x_TIhXX;_pX`in| zp1pDY{t|I0Mb@ds57vej8~f~p4J#~4TjS|7xR@824BK?jU5MGGycjU?0KkVG@3 zT_wqA<@G8VdASFw{Z{NG^gif(H-D}iJv^dW@DdYtgs(EGp-*5e)mh?>Y7Ko#TaDpp z#90QW??WFQD`w+)z7v;i?#%-E{K>E_ZQsx?V3^A#fSd!fz%QWg34}{Q!m9}zM$FFg zgG!ZdCcky2*Op^6Y9(RfQ=!>;B@~s1k>*UP^CJqs3r&x3;#9)Ud^yoz@6gQ;_u}}= z$}eG7)EO&5P&_W+%hh>)(-T1;evyRFCMHeCcsS}vR)t?GqID9P840<+y^(UFkE}Z7 zQta1M`3KmdEt|%|VAbZqD&WumZTmBJ1oG!>Mkpd@xZvJJMt)>ahoQdFf+AgAofoJ< z9a`k$_NpLxJ}xyLQXxhh(`smzS(LC{Y4EaXDXyjh?tvcBun+mnWXO=%C5oC|xm zy_I^ru2lAE|7YYUoDWp$Ry!kii+Tb$ZvkVkfX>TjCR{bchb153!T}vEL|sz``6VB> zlIwKVvx2$@6Db4pxOV(Q3QTN#-kzjfbQ{R_700JTD#D2$(esTfg5Qkak4Eg2h)je) zVPi(PRNO7l)_OvxYnzNPaUL}UEWNlQD5>eWTVwx5%BS{#Zac|ySi~8WfLB*e!A<}L z`WZx9!CtU!5dR#r*mg4_BD-wtahz2a^N`*GX5nt z7uHD4PeeP=Av~bk{UFdFW@C8Z{T_92Gx^xTE!wBo;N#;3pDDp+#uo1VhFt|sd8n6d z%Kp$_rT*362h$7!ixIFZ}a7cZ@F-^t{D0Y0I6J4qesPI!8jP7fAGanW3)Z$ zgu7UJe$+}W@2~+=PRQ1s>-i6h9itJc@a2t>1K`5x@gdm>q@jz28J}Uy9W_IOjg%^+ zgJ3KW%=c_Cp+g_8V?ru$^fCWq|2>7~;fDLkR~FDqJ2kfe?g5^)QxqZUf;ntpxUunA zPl|M*c#-#)h8N}&-o$zj3B@h5pS626IIqeHWs+PJNZIhGs`vQp<+N7%`DyT|QUg(r zVNU!BHtfsVbu#MpLR6{~Q!nSQc<<{~o|xEq54ULwg6vyYe^mwtXu?Uxw=*7&^G)eO z7JYN`5~#XB4xK-X8upz&y$E%z4b!)NY7b7o?%W8-Iw_C##jqqxQDKtk97k??49U~$ 
zW({grT4Ih6SDAR`EnESAHmUE6R3yw2QT7!d#Jzg*QAw{`^zlPeuc7+Rw4C;W<{*iA z>BVJZ@efNkJbfs%6yc|d1?$H5U)VYsaG)|{%LWl>Yq?=_p()hm&B`(%KOrSqre^EC zerynLlHv8yBV|w8u1C~0KayR`n9kBDL0;@_@5T6A;%o9Q&sLRNpuFJ**AD}Hj^?|& zT#{dNNS!Gd`}+HYDab^k%#Y~hhY-Gn6}}ZwZP=Q&-j#MJ9})vXn2N$ua2awfR1w=B zsyn&Fb@1{n5EQT_`%#+;1ea3#vv3vtE2$d7IS|(GVx~ENN@qs4*s|bkA(u^s721>VESi}i=_Nld zak*W8WN?7DY^K;wxe9}dNMc`vf%3$&4G1%FQgB|3bXaaqP_3)Rz4?ZX@&KFb7CV4Y zSmVIaz_(ZQ=SX4`UpZ$aUM+OrxC-1jL@eN{r>UkRB}1&ic88CDJ^b4wiHhsrXHoRL z?;55xtjK||Jo`&()!#c>C#noF=Z^rk3`MxlPLSfuKM#>#Tp9KS9FD&b!WB9wbueMW zXXfD%KirM&Z#nOIk0|6=c-We0>SA%!CMNFqUZTv4It)@#YHG>#<$Q<%GqF=R&l~m@ zwJ_rm31Sm}l%@B2-{fmS#b2NFn{N8*E%zuRY$L(4Q5GyH_yi(Ch1xF74-QZnik zd;gQQ33GHtD$+v6OVAiBYVuhLb`n!nysUg*5h42UJYV1|XUiCtrOJgy6I}6oE|@E| z?8|Khrd%`VQAaiN4dtnOgxYI3Q^rFf&lFHt6|$J@VaR#kWtostvLOCUvrwRaV$+6g z-A!emILT<0q1n3Y)5NE?P|G^5npbl~8S-Qxn2+CNeF~-+>#(TYlFx41WqE0o3T2kn z6Orf3bJXufVnu5clziSK^C0jjPo}i6ZuW)U3SK2@guScnOajpq|IHRC9KF$|nYXyR zc(&ckLO8Fgn3vWPkSFU~X(^B!vp-2m7NKyDmeassKNZ`qLtjWN%rCIwAiVchXvU)9 zZ7#}*#Gp>X1owIzA(tPEx+MOd3M0g59G8n690l&0cx6K{xwUwN9Rf$>d}oulXz{(c zL~XwXkZ*yJ=okM+C^GoZLA*PS`af-dnC3+X=jHNQ1>T={#jA<{LzKPIZkO&LY}*Gv z%P)OfwptfhTw{AlRx+rk(BoZk3Z1wm@nr@3|)>~gw-%6UMt{E zRw+>A#W(bSq6`+3DuQ=3ZtI(%j6EK`W;C-F>?($Rcl`j15WKM~{+?ljBE&9zR+KDO zYY7u@BI&B}O5WX)lw zyuPJlY1-~AawKluU%~yU`RK||ER9|{Iq&Wod?74s12j7{19RFQkrrTc%W&O?n(?iG z-;B_d+A{DRAsW&*o6IAOKd+OK(i?oZ%4F?1lR`+1-8pJnKDEq(sm|vmGbsUh&1;7lqX5x?B zF)`jhfTQ+fke;K|*LoX!(M#aFE6q!P6YhYOHAAL>2x7c6KM8TxZ$KMm;%OETHU4^h zn+XZ%Mn_cs+#e@%5kl0RNZ#l1(R!6+IgXiy-Ob$Oe7CDMEq%4t~rSG_20f|_5bV9%YPKI`)9xZ zVY`Z@)qB3VQ79^hOfu!_o!3T82QDvtw;E}I4QWV+n~UUrHCctPP$ z71)#>Z5D2Ui#uQ#QUCQ>2ARjE%#nffawGG+FZW9%+^gW` zhzW%OGIeBr3uGXYQ+y$!DXp8(fJNFs9x&^Rzg+TP-9T?HN#)-VroSWnh`X>X6G3i&u4wyl~We2pN%XJW90KY^fj(a42t*K#% zgY!Rusu3HLsKp&skW~oWv-dYbn}{COYAHky9ij&f^s*jt4}yS2Il!r?a%3|uOm`T{oh|R-6Qc8 zL0Dg`ju#K%Z|pQuDb+hkW9YS{Mzu`wJ2L-N*7y6w-g*|HCCCQWqYV0FGOfJ$zU-ul zsQXe&!6KT^zpg(2y4j_rJKg#-^gH%LM7DC6vF<*oRZ0V1-{zT@4GsIpg73 diff --git a/jay_imagenet_for_co3d_val_0.1_images/ILSVRC2012_val_00011048.JPEG b/jay_imagenet_for_co3d_val_0.1_images/ILSVRC2012_val_00011048.JPEG deleted file mode 100644 index 4046591cfe79ec524ab8da8ff29cec1f37591fb8..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 21248 zcmeFZ1yG#dwlCPYLvWYi8r&U%yE`E?ZoxHpfY3Mu3BldEaSI_pa0uSGI|K>tGM)48 zz3-g=y=UgUS9hjrs$TQ$qIdK4uD#Z`^taaLdG2`)@IqN$Nge6f`tcbW8#)ObiT6QapSd0!lJ!DoQd63L5%XOfq#ECPpyhK7lONrHt%!t;{iCC~rz>$wwviwd^}AA$f!3xLOkL%@Z5 z?g3B(0B}gK-Tv9|zrEn#5fG7(QBcv)F<>_|z5u|(At1mbA|N3lBEs$tg#8_Wh>L{x zl3N-XU+XOjts4PPXi`2ZolI>Pq4vZHJ+GyE7#cbeF$pOd10xeN3o9SLfS{1Di0o@Q zc?Cr!WgT5TeFH-yW1y9_jjf%%gNLV=w~w!%fB1)p$f%FepyZTKscGpMpEC;zi;7E1 z%gQV2>KhuHnp;}G{^;)M?du;H9GaYhPS4EF%`dEPY;JAu?C$OVIz2nTxV*Z)xxM>C z7aRcLe~|Sbl>LP+To_&Oh=>S?D1YdJgZF_A1YAU?q`70<1gHM>D?#mgooT`z>$~ODc_aO zVSqK^dit9NgZXTOCtS0xbXUm*lB@o^inQ*lNaD8s2Rd@}&4lJ%$y@B~X^5Cb>OpJzbhgeQ@>1L!RDw2b;TBzSB08Sqw{sZVqD z9R3+FIQH}-Ca+#x9dyoj)7tT5k?{;@?s)pq$XER_T&E)px*w#OCYKPrz#Dt-Okjq> zCJJI@u{pbXXl3t&%S@z*|J1`lpe2Pwi1qUsU>mx%*itsSw8@YBr3_N+#HhQ#8{hiq zCPi#l00J-Ovu`Np($sg~bYUP+vPp18Ne0M~&PAKJcLM{nr3v~xcx$~;1 z!h@Le;6beMjb+aO;GS`O>9*f0uqkgvh8GwcR4A5?V(v`f|dw1#39{0Ukr}mh@AnF;={W=&5eg>rCJWVS+@F_ehuik3H zo|SH(aka%=yLx|%mO1r5e4qO7SB9+>{_9edYF*AZ`0u|R{P)XFS6ty#LJtEX{%`k1 ztzS7O@&B)FN&Z*ddI{qedhnk9GhhhDUWdG=@t)wH|IXn*taz(advx*tIqMbVputUi z9HKgB-*THw6x_Ctm+So_`&@ZrFMG|a$ZBK8ng1rmecqT7Rl0eDHq zjNs+th8On=Q$OGCOoM@ACuxJ=KvDe(_IET{2b$EZa*0UhEt1Bk(FM&@WR;l`+og&xdK7UJo>?+~5{&v|}2=w-!UIIz*}p1Afx 
z-q#NFpTZ`$P*bX%BK$0`Ld|W3_T~3Rldtmae)5#P4O5VuthDREZHVK}`qqs98-NLc zkA31_-*6X&#<+L47A!1PvKijJk_3Gvk4<8J283j0OEB4C%@`5?iu~?w(ntAA+JB|- zwR_>-#3^U@>iOB~^-;%<$eV=qW*M@*wM%4y-HYn1-cA3h?{od_T-yrcBd0jsc&`WE znYtia^(_UZY9I8Olecx$w3wL{sOZ};zkHXK6yM(V-eUTOXLQJ?B zMO)_>`1X;t{depIMm5vn9n_UD)of5+6dH`GZwvD0rby7|9n$7IYi+T4D+AXx*>G+1U-VL)D&uD$4p1RT{?^r6`YaZ;M0kljXgAYkPo&g1!6_R+Yl6Vp# z1t7M0af<>FGvJ>O|Cb(8sAYzy2DFXzp5;ei-gq{Ba!nNFqIi*;R0Oue3`*8-HO@z7 z7@Y+A){~)`Ho9(NhkagMCD6LdvI-n&Cju=wgg@b;MDNL)hmSc_&wv{1<|mNxQk5jq zGhiTk{r5exN$_4Vc<8kFly@NQ5Jo`~(jUL_PX;x<5|@GuSLK7@ZS8?^x|?)>lM#dS zy3_~aB|~PC&0698sk17!(m9YwQdm%F?tz&?*adH88(;ZT968pZKlPRF-NgeNDaR{s zk9`}{li(=1A6PQS@WZ)Fg`6l$pbO>;Hv1zZrl#m{OlMXulT|(1+n$h3w0II{wn~Haxh$rF)ua6-g7UimWvUl_}Z@096m$0S1&bj#h zu3c|c{tm@`E-itl;&<_?Du%?{;&bs|O?R5NCB*y}J@&uXK6?4Ue~&%cc%8&#Th&bniEhxkPntMNd%$u;!Q5g$z~g1Y_EJD_^w>eB-DioY*K{&@NF@D1 z4iN(?9Ou!=gqy(`bF#M={4NV>g9*x|;QhgheL;P7SH{>frW+054F%a3Px@ACk);Lt zvVo!bF8L*FGzzj>)7cQ3w8bucE_H79cRWIUT(2Qmo(-W9e?;W%&D7ItjTg^=IUZPe zBggNePaP+@_4*X&G5l5Vg^r%WhMdBNLcgw-TmVxQOd0`y8+mxXMU`pe$GvFyEbf(w z;5L4tg%)rN2OK4pte+wXwa&r~d-6AYG_6KUPW3hP6&q0 z?q1-4ct^$!yq=kM97p72hCKteUY}e) zhI4dWXwPiFz2Ss$2Pb%rXA#C64i|$s&gh)i&2QxhbpK{6{;5Bkl|H%1Jeq5kLAM~? z7Jor~ux~z{Z&zcuAZKe&qT(9~7(~p~`Q?bxJ+c2)?vr=9kUhzcfzI`plOywpl3To< zN<`3R4U_Plfyn#g-cecO7syMA3Z#l<5b~Y{tkE{tWWyf%Rtj!*0>VrBJIl7!7YbXQ zuYu@!$$@Q8@-2bnx~Z3veD5cnEi=SX#TC{wx3gLqJbdN%#u{&AH%w|<#A6-grFn_Q z9?weBg2SH}A_ZJ$;)x-B!M{@Xo4&Jj2skh$^IP=q2yI?gp5Mf<*B6h@(zu}WjyP5J z77BdcxC`!Xmk(H_y|QZTUqK=l8Wj!U&V{g*Nn$sM3Xvb-t6!rEFLb3}Qq1a=Ii5Pa zXga4O`M8t^bhVI%_~=8)9QkfPTUr35ufI@rqkX4Mi!}O?l2VIjZ<+4rq>S zN4Q;Fmpgm5lL;FkH6(>+7dmW8wBG4#+cjcwyjT=)8@A1gkE~D#I;1%-@!futG=3K^ zE?)V{-iKdB)`2gcnZ#tFVl(d8PvVo3>uvPHKEBX^bczw~lFW|OiqnU+^JlkIej|K})g}(?>oWsphEONV>5s0B_gK}E? zp)x?>zhUz}@lW;AWlyq$hUWySm0Ja}!>)wy+gWMu-)Kbl`B`fQr}HP3`36l@A*}_= z4m~Mk9&E*(?%ZPXc$`s5H1fACtVX3K3uIXly2S^hmrW=C!YchLRuaJW#im={(A#Q@ z3@Ps5)w-jA;W7XQC zTJANLILE6k*BED(cQPM>MUEy%O-j-5&J+jx;Ttz6KfJ0qRIzYPO1aC@x$93XMun7S zWm9Xve&w{aAae9FVGG-#K)?h6;s&}5x5Hzywi&oYcyNqP9>j@BJRz0rVc=5 z`aA6dZYG7$CrZ11)v1CUiC0;X9&^=RkK8u~MH17ZPfQ$Z% zhlBS`f`a;V+f=FF1h`xdo9$ofcZps1ntAjSrl7u@t^I9`^d$W-A_`|SPe=o&%RDGV zRg@fToLLyML--J!t)x99Ek~dUXBfoy3`oN7k-WleeFjXZnWRQ-YTV+A!O|Fu;oL`p zX|0Ghaz@IFRQ6F4)^cTfCnrw1vh*&@)G|XDbCRwhp$X1nm zund{saAJQExP2jrm-nklpayy%gU@$ zcU=156n`krnA%4Y4%M(j-PfNiSwOAashzrvmfrKY7RW#W4YWkr^G`43byZ0Xlj)Rt+HVq8l<9nrSeLt#=MHt#+tg>2=1xff>bV7oMC)|UUHgI^*o1%$NC`!0xoy#wpv z?PEgl){=lT#f}`)ad4AF_G9az$8XF*_8>bN$WkoDGayTkBes~FH|o@q$No22*1+R4 zV62&Ni_EI>UD>Vm7t0}o_!h~huUFKBk*K`MV@Vg(yP2mW+LAQ~E|vYKwo7E2(;uRZh3O(!l!QkO zLuXpC$SB=B=(UCLrNcx*$Bavf_Q~K$;*~g=bO!g8${3*L##QKTwb{Ac2%ts+Mj_g4 zT6;wj4hDb(DNvD|cW9lwAl!(!E--%6m9yDL^b>V|=1SAc<;`TnDc3b-(x*nk$Lz)h zq_gd$^I@XSv6^RqxP8gjFdGLxFdY(aR{4De@dGP(X77l!iN>P;GyUL0gj26WMLE;F zVO@49$1`B&qO6+M?z&>JmukJ6+I-m|1?t0c|@xe}|u`ALRIB%t5AqNn3qc^?pbt1tQI8n`J zZ5yaF`8cEaITWRY<#e?7%Sf|n=&TQ=M8kMg<<}n+N@Uipi^;(v&eM~9d#dr$Z=^LJ z%NJI!wPZ@^lI3`ms>l0YPgHD{c&zU0(q_$S5oY;~ikyd<9%o8SY+yuT8nV7Xqquwq zT|?@EpRCl4`Auzy=&k(cn-B!`7h zBK)5-706rr6z{J0#1Dq~-^BXhs%rPXUes6}g-WWUNtSM!`oujrdu)*J(wa{Jp^UFO>x^PZWqPj$a%H1pKHFjJurnZyPZ_)SJB5%YzPO9Z3 zFIS%d3WH`NJTiYUoxf%;&HtF*R1}`l7M{{=RxlT|r_B5ZFs+q!DpL;B&u?aGghhA^ zEd$rnAC$0+L9+`Gh8l!ovv}oTNJJ6}00wdLHJkL4Zr&Ybw{CtquuufA9U zotZxLXC@p`U8ak78Hhotovy2cW%u8P=?hM7`9pu~0l^+&*DKjqzy0-9qqq0J5EIjc zk64-wq4CXg@~)ICu>j=`z1M4oK9f^qDl?ahhQYXoQ~RFyPtF(BaY*V6%lXn^TGww%KF+GVLLNr-)9(L|cF zi}}JjgA4-8z(~Mq_d7yJzT;yQFXv~1pulpb^qBlN?xk!Oz3*>uz)t%j#iKcv-|9d0 z`vIAAh%QkgsWwZgc0wAb69P|lu)k?S4W9P>XhwDFsTJzR;!Cl 
zcSJW}IGYcn7rUPBGP0oPNy?6mgU-e%mOh%VKrxlt0zwxTPevsXOwSC;^Q7m={LQ+E z(t2|UDKLG0$vH17wiRjrG;r~T1O}sTc`7v<_vwrOjQ4vC27BTD92Ujj?m5WOOt2H6hrmxtn95#k4Y#zW-1jtsQO+R8|V)#cb3^erP|4N zarppunD>cb=WNTpcDPIH ztR2Yl>tmLfm(AzubjUX!Ob;bojM`szQ_oiw81N6AoxVf#MZX85UAt4r|=<&%3G|xq=KW z1$KX^ph##k9GUVIKzgpStE{-q_cN8gViz^L(l9kx+e{89sWX|+tV+sAe><1M`^4u< zLfx<=&vv#x>arEd6;L#k(1c%ST_!OIg>UepQB{O zQ1CK^!Y|%k;hZ|vcX_%e9sgThs~<(0jria%M=vH05z;;ZXk11!WZ z^Wq<~RDD>Y_K#_-ZV)}}dhaDiU{L4Z^+z+6k<}QC!YKQTQPt-0OuCZUv@zZTK`Opn zD1?5x%$0NJ+=O>83E@H7Yr5`31bU3Q9&2QnFO_wszYra@MtF&^!z&rkwyKG%;F^68 zYa`(%+}ZJPEo4l<;Zj*BqwI~yyp@fgRx$9+mq8;KZ1if1f~)OyyK8EgYQ1f#4j*=} zQm4Z`vMmthHXPC_D4R4$y#1}K=$&cUh=*dlv-NG*E6;4rtGe~>gP+sH=A$-bXXI!% z6I;#0;3x}PZ+_xe)%GQCo)oeqj-+Hnm4kpHH2nFqpzIF6bTkUsyv5Z+_YK%-tg^V~ zvqXc{-K3Txl=tuK5z5?mj6cC|_v#g96D;^ufd#*79kAe6{7>-v?vbFe)tv7;qaWTf z3>46&f#t*h4AeqcrxCc?GwY^+qEDuT>>p4kdUdH<7 za4+N;pwDJ7TsT~ImCRy9Qnz0*98!2zwcik%W57S1v0uX4sFlx`vrGM$F?fv$5=X$* z(DFe;vf_vj?Q^Vfbjw-&SVAsGmr7_pb03o|j_vHl)}mP+^W7bsFj@4pn3O?uPK-bi zJ2SO^TC>MmZ?EXDqaD8YN`sgAmxEsVs0B_nL3g!lUI>v9v0VrS-ZSL zN=Pfcl|Vt)=YqIquI+&x$PI%Fr8HwTk(j0THJQS zFH0Zl02e>aKpfIL{S*6=WZq9?OLM1@P0iOybBisGChT$DPsYN(0#C<-BpQcPMgZ%6 zzclz#8}w2?0+Yy;;F%dR*5J&Xvoo4P=@60zB}&_Yc1`ZXFI%KMNXg|zvMwI#&5S;} zbc)$FzSTEF{QSEEi}+pl5qt3Ln@6(C83VSHHDITR1x&F!?)VQX!1*VIf-PEdgfi&e zr=>8g6ISe~u^xK@Il!<^vfzQ}4eEPj(`Ue5Nku{rD4{1IzN`RrrD4cikX6|-j<6A1 zuiw}}DO7P)a{=}Rc2+2^_srDQ+>A4qs4z}wBsGC;mPTeXZ3$UjiXa2WO!OPclf3jH z)7Hoe;1Q8Ul8u;4QMOS&`+A^S^Zhpi6Ngos<`K)e*T}4wTlcgJSiW<^GnrAc8*DQk z6V=%8aia3?D}{%1ym?YGg?ns#S30`8JXj+S1kvhmV+Aizy;T?au_J4^%_m z+A}#Jm}Gmad&{xKgz=8pws$dsMUHOy6&O=&!OEG6saqp5eg&Wu&p#L&4Dl^SR|7B* zGNsuUqvjcq!!()q@XcQU-|oL4!qsGN@WrBCet?+EU%^EMn6anD&HRmg00%E;}*1XH4D;X2^Yq)_*PK4MAQM^b#dV zUMM~T9O?hSQAUT(<=7Y)b{=V<9cdr!WY^s3D84da6-N@ZJ7(>Q-7RJubFjzaX`?~T z)D;w)ZV%^+e3>;>a|>!yct7R+HeSRMd~0!r*7t{Wg;aW2o2qeW8=Vw-qlEUgkO{3ZMkK8*rLQS8tzQ5$ zCbdbwOrljN&`s=AC^o_;N(t2knHri0DGX$-v6tL*onQ6|fn(en&_@gFYS(uRBd3!} z;ZPg(?}Bh(&OvBb!_z?Deg>oslYzBIgZx4HIY0G7VIr;hM?$e19hlGc(to2* zHkxi^@|Lr`lQhp4HtCJ`K-s*L~E4Tf4xcIan@(egEX;O9z z)i4Zn{9Ks|Br$QY2<~sMUUyx6kmF1$4<1XVzIH*mgm2cTVp-9I&kQ>hSH)q{eg@>e zkUWCLMG;J?<@nD4s^D8vN8f(s+O3J9HXI$4L8!t*E9`UU2#XMm0w zUbharm>%;>v#h3NS&F^$+#xr2!pz4Jy%LW-lZqC4#%Ce1) zNuh=Mg#IZ`?Zpyh=e`dR1)`H%I%1Zdq;%yko_WYtA8W^e!1&`H!bOL2x;9DfL-GuG z-M$q(2Jd!jV1MQBGvD>A;W10W+YJi=X)tmNJ9v9}RFcL4IpUsu^8hjY7VM*DS9%ky z9DQhT-%-qI75+KL$NM*W^2yMekD}@31w^z*@P#Q7vyqi-9Ha=3ZE>sC)Ryqa%`-sN z$g^6|juQhJr3EGYQIfT+OKz#Msmjlge~${W4*rK@atH5V1O6V$O^PXeA<+UHlTat& zK@dT000%Z|5xLcNhCiN1!c1e?g>Z$W=OLeBcwCVue=E@L3xin&`p{aee|Go9TO%?S zc<06rVMnx}1qA+9&1M?RRD`0C08fcI$?oCEvYp1!HU$k%3d=qH8PHwc1FoTI@}kZ2#n6=Pegw=mDAy6GIPMpn&T&@6@ZD zJ98s+IgD&`HvZZA1#Hxs4wwNZ|CTPHN^JycsU{fD)NE4d1=s~vY7@Z6MS#H8R>l!B zpjL9r@=D8MgN?f6*7M7P?Q-j_jrw=w;rc2&Wy=)dujwpLK7sk(F6FN1US(IbkFAtD zlNUQe@02)~Gl#EP8L*O zRo~3K!9|YGTF;#ELJTbD`i;!1%?jOfIJHtN;utD?-?%lcJo!z%G1DK)J6G}>|Nq&$ z5y8CM;J@JA;>vsSf&)NPhB%~zl-U{4G59T;2@VtSw5|Y%alOI#Q898_u^Td=TpPeT z**U-MS2=E!DoYQNDH{~3qF5ARPsT#-(WvO6TEU2XdnjD3s;=&sd=dw_-Th|0C%_3k z3ohL?1928RGt>#wANR9*=&)LN<`b3b@lDdpX*kFmr?>M*xv8RhLqC36`xeh?WgdJ7 z4bhF&J)mx+E#G`#wWbtjO=bvDXX#><26?ZDoa?>Cy$ynXf77Un^om#YwS?hCso#5S zR`W-eP91nS5W7key&GD~6L!5sp@ zNV?Q!L(pqfqGO<@TPRRb!-#?H7u&vE;uQ(JRt>V~Nif(Ss{%1XUZ~eSlU~wl=G~#< zeRaD6%rG3_ntf6^i^WD_^q9G+vfD4>7`mSYx~$Uh_b$9oc{zAs=3KeMJ!jbv&Lv_- zwZdlhirGRfDXfQikEiGKXiRTvvl>FZu<#qk-noSU(H0xGkyi3PT7?V%@Nv_AnB*_s_~AL z^0SzIPI2)78FDBI_nNDijZ8MYZ0OuqYKRB53{1r(X)_DNr%EnYYG6<3(XUOa@8*)N zOfR`Qc;9^>ld+$Jyq=f1t9Er~ktI`tW0R5lAy(HK<1JjW@EuFTJKG^vt#SWSJR@LS 
zaNP+^KbYu%VJ{62Ir$MjlIfcLldXj}bJUvE50dOFh|p_2_HGuWMwN=bUrE}a8~;U3 zLEEd;6-G3ml4vBd;^HQr>H`Z~VP2yPl8IYt)uO__v4ga6eP544ngI`F3T&mw)z^HS zLNmkgW4&DQf@|`t#P`2MbpN@=&v42MGN~y5vA+IW8TQ}Hv48pf-ZII;vvRydxKC+c ztDDpkKv?T&ryFL#?r-pEdCYuvf^s$%}=xAOjbw z@+kaghFhaqaf+1-;WIKG986<%US#icX9^uiAxP|AVr4pe3Pf~o!8sB?>HE=Y^_VOUbhO9j)Zwu_RrEx2Bx zcwaGp9XNToUC9Wc@4kph$M&&R->OlTJ(QJ5*4>UvLa`E0e>AU;09VSq#LBO%`VUNS zn&}Xloo2j-cH@pEDJHGyMmRU`spHn(TbRl<+}Y+n(Hm&sis2f5=ZrbLi~Q^>j5~Zz z?G*mRUrhw~t@-l+tivf4?Au|y?`*XYPWV%{n^XM21r3lu|CFjJJiolJAmYUI_C11a7IKa03jy=QJ&L> zSK!d)EWvG@IF@9((Hd8ANX!(PsgyaBR}?oXi(V?`M_Rywf&Mw&eVRhcd{mzObS<-} z8@b8lXYiJwHQ_g;tn^oO-H#jdf)_6V(g+6 zh;)@n!`942IGd56{Y?Caw@l-AZGRxwoBx1ZYsJX>L%&JF0E@*xfJL)8#dpSkp%j0s zJT#6~6k7|)>{iEUQbG1ut z>vJ~X2}SPy?aXL|6?;Pqa53s_`%~#d=#7Cc zU)+6$;&iMl>yKD}C`rW&#A`ph(cTJxxB{|m>VzP`l6ImIf0IETkKQ{ zFMh6^xVjlE6P42urwBf$_P1m*#m~uAPW5v^tvFy3LN3A-u?iwoeEmz_e(f|Te5SGx z8$fqb8a0%g;ijsZMZuu>t^7dK76up#33$ivWJG(M!{wB=i+PC!@gqiIM1Qdnr>`p+dd4T?G~ zl-Bf&MtShKUtV_-a4BGII=Xu%ZsFEx`cb;0J{9EJ3TE45cC1RxKd>qEj4Qp0@oJ~j z1USLvd?4$bpqHFaEPtXLo*mDO=6J>WwTsv5jm6l$0aDl5EmP(IkCg>CA@i5*wbMrj zAcwk3X@##|v#-neGoa%ckl*N}&-4N99fncLEUYJGe4*(eZ~kxfSeX0Rgt-sNe{&xu zUVq7zv~ChNze*@a?!NP{<5U-!5u(>?Q#3aqJSy=gWa*o=DbnW6oQP0oY4S!v08N>g z#Y}!GNs9`P>7CL@eZfgci>Jolhb6%+1YRN;Z2Zf!K}0v=@cNGLSEIV*5Kmlg1Q}Jrw%ly{&^AU+p)y zX5Oc*Cn(R07S8?HXn^jjwb^{x;(zx=dS6W-d@^2V+^{9XTvls~-}y0pA)zioSy*O0h9WykRCv-2aziii>_dWEO0nLsaiFXE7e{#QVM<`h*R`5b2~~ zxQtiFQ;adpkfFeO(30|k=R{yNCpoaQK@?|nX8Vu-Cj??01WzNkk)PDGWm_!P+(D@9 z=Us^xcrYxEkG(HnxDn%M;Dwuq)J)aU6D)<~-BWR~ds6{b$6em<3gX8v z)Dh{TBupNs$S*W2oQ@be9a>Vv#+$d?Cm)Ory>#x6Ex!=2#PuSy4yyDzS=}3ky6=}D z{}>!#>Ug6unra%)J0O{UGh@8!?xzo&_S+~DREwp+>fq}9Q(ip?b~VGNikJ}-Fx`ej2G#ichaN%l6)l|5b=com7HoaJn|$7Sub&l za4dV$Lr<8hAI!DF!#3;WUiC$<9kuJ>*9WI#x1kq?-lcwHgX9|bC3Wigt`SY_ObC*q zsnk)(p%u&Ja9|6%?}J}%j|$d-N~=M-$sSXT1(G6oFl!ewP53#ZPcy#N=^3C5bg0RH zrLGWUZ;CkWyy5(6&nnE)h5OL)PJYd#ciCP+yT#Mg_gR5@l-J!?(OE;U4I zvhg0}T=cbF=MfRMkD$3AlY1Ar_@{gI z>wf|>@!sGI{|oiiP}^;$EX%q^CFMmftuz+^VX5d5oScmkZcF0saVg=p@o|6Aa_Y|! 
z!5<>tm65rcfsZo+i{YykWYBk!3-XTzXBQM!xMEgMa!5waHSwx`+U)Px0)#vm`%NeUKSbaudNjK*S&|{Md70qWp5wzl zh+Cgct*{YJ4S-NvmKqfnSC5&P{u;-nO@nSKCX7@SebY-~73~Bp-!iYp51W(Ql6`GB zN3E&KQS_Pq4nD8)7v$G2`s^A+k+zpM`$9L0P@WjtYWLk>1#Cj8DKW!#zVc_l8c+U{ zn%QOhLkfyMtGoyh%$v)2%2~vJA_eQ1nHpz9JyLmIb-INWhTiCaFCEjqWezR~kYJJa8*{wau|f3y z{tA)$uvzB`T8sH8?U1-{&hDx-b(-V0+k5x^(*NER>h#J;tflCFZnV=!RZH z7e*SPh6CD2EShkn-N;bUyKLKQ%nAtX0qLFn#GMPrOqv`rcZf^1w!PZ#YeZ za)jYUByyWnw|P#ur*?{^^tjxDc8DffUXI_NqRf4zzqp7v*&rH!W`;xc#V9SP?)XXw z()}Z;-VV%W>seQyI}v7>&?>;U_)syJl z3RN@7kFZ%_RcgVXB?Jilz94kjSN7aoDI=n}7*U?c=z;Z4!)M(Hz+A_y#v{q}gn{51 zU0av2AFS+>d&gZ_1Szh43oEDs{!(KB5B&wHDtl0bT{lPSGssQs)dHa-K%G=j4pxBD z+(m_s9^vQN{&GNve_pOO^$Nt*w1v_>j<;DyJ_^%)!NH*RYjayJ*i2$22ys4^_}DD9hYhxtxwafy-C2?%dxq+q1HU9*=k0;;OHD zbhYG^W(e{>?%6ZvVZ0c+|2i{L6)Uw<>paV0;$7KUp+Lnt!}(j3P2!gfTxFGOYP$Cl znJqRM+R%>GWNlC*b;e3VGDU3(lxj%1`(fs?@aH>YYXYRiX8?EEIr+?IIXElHpAKqA z2WkZ&HCC{U1@oTxA6q_Z9{$=B(wE*dM*Ay*wee(X6nDH^J$u2acu$Ltgg_Kw%&)gHI?rAQhR zXVoH0YOn%=O__0IFN)4?4#ph*!ayf-&wmOz7VRfcq^q#}rmy$?K=ere-@l}dR@9FZ0XC4f*zTtd6cH-$w-0(SZxOw1hvU3kph>kWpZVlyAJ}@7DbwF^A{j!wShi%*P6E zepGFVoOmSls&Qm5Ij6@nYVB*K0xox3dJ)@ID|^JHMy$SjK=qF%cO{uOXKIe^b(8$j8GN7`~eEztOFK^YqA*mFpkMYcrMy zmb*_)we#06cpR!9=zTB82DyxqTm>)YcU6goMitZ5W7J~7%KzR-1UgAR`~Z^%byyP+ z1W1br3et!9eX7 z8mUITjRKO_JsR|vG>DNEn{u^!t?UQH+UL8(W(rJE@7BLJ_99sWOI*%43wv(1sKH{a zR}U4*D<;(KB-F~;)^SloYW8mUPo6=H8RQ4*V`X_rT^D@0g0}=~&_n$3paxVschrMh zbk|^=+#hjKGoNUfl38N4c&PG%WU{}%0L-=A>4;^Bdt7LIxH+HE%mXawx*3* zXl|vDX3BEPChmQkv-Y7sTw19_n>ES{V>R9@IBZtpS^j8^JMsJoaRCn%DQHWgZvoNc zbdQJJKI__a#>Pxf#cXvc-)^`+bg~Jk!y`Q;VQa}tcV@2aw%DwzlC7JpWx&_^Rq2p-tB_4 zY9AU7jo&0K9q-zwn|Xj2xM5OCZxPoDC3dz2T%sUyv%7vZ( z*uD6#9L_2>C(+iDBAbbuY}^z!?rKay)E{XJLo$Xn{J8!SSm(jAgdK5mEJ*S}AO0FH zod1i&IBmSIpeUIZJedTv5og6w-m^o@Vcl*JrcEQa)KH#kZtfGRP}~I@$>Evwh6q)% z%Ya$SwDfjA0sKn*e7qQSzov&9o(t?$`e<)u=BcZx*n;IL%3vo&N*vH%8;4;Mt{&;>QefC>vtA)60P1i(19lQ=XcZElHigOpGqdI>^5&@KZux zF?+TY#T}V!Gbx3Z1+F2saJkqLw%luj*~~Pa!pJTHIO?3+(>otL-er?uAT;b{b0Ck;$< z7Gi3~e-S-R^vS{p#>oj3lB}DYj9B&-EiIOj`v6_}M9jL>MPhtB+psB+j9#UJzWE;iXn2K2MF5kkzaZ zA$Kwn<6F)*?Gp`Qg7Lzc^mo;KJ<0^QzZ^6;+(y-;Pzg3n?&k?Q6KPPksWS>aaEufD z#Fb#tg7>zh?^Pb!>u(+^-*(GNUUdx=28pJqqQSZBUY2ehmC45MB;{g+tY&j@pwIvT zF1H=oGa%9OU(yLN8{6XGkC068f zYayKQ%e>d!$;jgSs`){$?^hZCtM%XA$ND?`jRdP~6VXcY{20(pDRhcav)VQZLL$k9 z7Dd18h(EGeM{vk%@|}B=u4yALxmU^DK?7uxrFUofK5);El0qwe%C^W2cE)DuK*=(S zbfVuI$-f@a=Lc~Q4iIW_RrrsENp5M@4vz5z@AW~#YQzW=ZeBZiCoBAhbu`S%gYV{@ zwC7+(50lmon0Sh?%HMn?IZ&LHovD=@ z>^GKm*YNk@x~t&ZYP+t`UfONfvsWq`9AO+;mwO#BVEp+{<>Bw*k^gsMlMm#I3}!=% z8ZBi-Q9+omwRcuTBy(a^4)}=@`;X^PpxiqA2b^4XPQ8rc4cUcwQ4COF% zZrL7azkn#c+tPH##YMD_;+CMhL;WwFK7rS<#uaKDORYGa;`Q<& z=B?1TO)^QZuKCrOi$2|^1MWf`L<0h&-h+Pjm+F98-fwW%BTUCLFCmL;y*07BDEo#BNTVPA|o2ULM$vsN@XFw3~1M@#juHOFi15oUu zNL&0gC>_Ba)qTTf&2mxw|)y2rwA=_YWAFL_N3gcQcC6pMEbk zvipnuSFa>K!zeh=xIgJwSNMksjYp0m{B^k?(tXh^43GluM?Jlr3BFzr{*iDP$omx9 z^tbo%QafTr1Awt$urxz=A&_*e-G6_gp?+4aUh=Bz&#lg!e}C(Hg^rzf15=aBN=qgH z1IV9cb1$$PU?nSzCxVW7GTTQ4u;kx%{AZ7Bifb0GqPh%5@hTp_%{v|wN`l8l{*8@) z&x3(iz<$sb2k0Hzpy@s6iu2H!{TcA{-x(fLOnn<^6ucS%d*bFlg%b$UWR^8Ub$ddT zm++9Wxy_F~)tE1**b^Sxs&8~{yT8(L0SouHqhy?QB!@C1qSGpRaS)n*qs$3X+-9RZ z@;U4|2+>5&5Js*rZ$x`E(rH%kc#H=?Q}fdPoMuM;PpyYqV~7S^N{FZQHABVp@p71u zB8xB_MBIf;Zyz#m9|_um@4bJ*^dB7;?0c&>a4>uXU*X|p5FxCFXDj~<=tS+f$vnQ> z3ZU#THiPx+R@VlvW1Lh!Vu%Pn0py+mJrmCW)He~qdqxu-C-9t4j0q5hnm~JR*#B1e z7Z#RirRusYp1b)4x+i{m(SZ&V&HBRNHN-a$YSbqYHwI4;+2pX(QnavkHaplJP-fDB zn-y$_|JUMwIQ`KE&d;Cg1s;eYSpU(a{*d^?_+Mfr`F|L?fkS|cUr&4^KkD2;6T1AY zsOMbtaVTwTvQX$$YE$KuDO?fi%FMuE!N7M_wP1UBap&&czzqx@j%Xb%#!)^>$8$71 
OP&?uyH9Xk=-vj_Ua9u%>6YMc5N^CEPMrcsH&)<2p}OL0k()A06q^q1u)Uk zG0@R4F)%P5Jix@lA;QJM#>OFkM2JsBO+iaTO+iIP$Hc=*$H2u%Ma3q<&h?m2P*9MT z^@*e?zXXqfApdV5NDm%7z`@2L!^I`zr>COl|8IZbT>t?l(h71I3K9c=On`(!fCT>n z&;kGw8e+A-7yQ=`5;6)Z8af8%11xOBfTo84G7<_3GAarh8Y(JcbO_@A04f36BYHkr zbV8ly7z`dn{Nc$3n2d7u-Nd@%M@$0Np06HYk&u#+Q!ulzvaxdr3JHsdJ`sB=ub`-; ztfH!?Z(wL-{LIA0_Qgv(dk04^Zy#Sj|A4@V$f)SoZ(?FoQq$5iGT&xp7Zw$ll$MoO zR5pBQY-(<4ZENr8{nFPzFgP?kF*!92otgbHx4O2zvAMPVb7%MXboB> z3g{jYTYHXUkuV9aG9Uj&?GI-EHN;;1k1+c;#Qq(xIRFO*39)%71b{SfdBav1hV`%Z zFB<%d2mZwa|3C47MDk~m%3&KRS_yAyXE?CWp?6&5fYc$Sq;(yP=yDEpv8?)|`V3Wx z6u*tA{ccCi88C#5$UPz=`5D3m$`h3WPy|bDICZGVZb`2Z?WFNNKP~7@597SLVv)4 zS)&a3tYmSX$4&t*1TH)cNNkGu>v9!R>zhheQv4qT*p99T$+u2xVNXmR2kWXJX9@Z$ zFg`I&_ySrl>_}!{_!+G$Ww!NE3#Vwgn~mLGo{OmQjev=+SNVQSdQrvn(6Jc3IFs1R zGZ==$#V=Zo4*lLW$`qstB~~i4Z8%_C{qktzk(B9vgw$(VXsN~{X7eh^I!kr%F{DJX zOa;|ZZm!4%MhNTGp!_g7h9IlG8;x>o&5bf-wK-fCVinA7Hq&2ejhUf%k8|y`dRY* z>=nC>Jh2H@drsW$;9aT-hmhwq&9Wj3jGaj7MP42k~M( z{Jt>!yKw*WTO&B|-2BPpSst_Li`QHzA{njZ)SMW46W+~GbB#qv$8&?o7wNL1RBIKA z=FddTV_&Zgp)kr1!hzaOSWgigh-%+hv41jPc{T>?sbP6n7y&*Aa=XF?|KxaD_Ooj2 zqQf#Wx>3L6ZaJJMR6+DJ=MZQ3eVE*(Thj*qooDUbow8}9=Z$6B*Kzgh!SmZD^2TvO znDyQjiO}fYCDbIe#0j_QR0HQaQQH$8AEow_kgCa#xV&~T^s7SRi}$oSE7pj8sDpf^ zCHb^;%#zE%NJbO79{~rDYRQfGMlaiQiy*$ue$C(K`gg)6h;c?d1I~rUR9!j)vPYlU z`z;?OfhJpE@^D~J7t|vQ2Q)jc3E=?5?9}W_O+^POW7rcRp52`WzK1)O?;&@H#Y#cc>Ow5?Y!*O&JHNzo zj;y}xs0F4keQWL?@80-2+yolvFvz55>=+VXshA+-3w{x?UNs5__`gXXzyUbWH2o9C zue=UZSOCo*!U3mRo`yOUDkC+Uk_W(GP4Ls+aif&GvG;v1RrxoI69-@+zAj%s`7IG}&+NyYgtvMGF;I=DxhyEE8jZF*B?buaW2kkmMiD!b}YfLI3TS`x90U4i4ifxEAke>!^+ zK?Y})wT_Y;(-in+TRwUt^xWe=mybH~U{SSqxxYcL9(RNP_uRda(N^jTi6jiw+N$w` z>vtipytaYQuU2|tFFOB>_QA=c!2*qZ$h!^wEfcp==#W3S)*GBVns*A6#!oE`R7xM9 z>%m@tes68N-JPJ)z3TF{He#&7+UdEFNy%!3uO!*`Qs_V{9O#t?O+ny5IzDV#>2F7_ zPU+q>@$~(Fcem(o*yd=L4Kb+uJTL^@cxJA2Ya=(=B+WT=`=Zg0#^PXJq}peW;^A1pJeRbdsJO{6)%ZfQy4#rJ139 z8#iaWzDgCx+18hA@%uEjs{4|&Y47v!8#_oq+%xJ&vz30N+5yP>_9G3Re7ti zF-;N1Qq|YAk8?yMFLycp3Fs2}nLwGSc#2{M_flPr$!*^jdwnHwA7D3<^5?vpf- z^M{P#@6uSldmxJO642Kub%S)CUq7?El&LrdLF?FMS(<>B>=J>bPHavJVimc>^vDsg{4VzO0s83&E zJo@Z$R)(p{vlE#eD1hVx!f!ad6$}bdXT5V|T%*OUWf;Yum}}oSN_gYmzr>GX%d>>h z^2r3;+pP17WMVdik+@`gAx^2jr#7ZT%=V=3RqCOAQ>dh!MoZ>sbHm)oM=$F3L`l(_ z5&L5_?9G0eophqrQl;>oc#yYs>UM}?bvu>MSnPN#A=+25I>yVVjM?_b&wC{TeRh8N zb_tjp+{a!+9m!sut~35Hh6AdN?FQ{nlIR}MVEYL|cvkj3A!~wQMW@R|W0*C<)x4my z6CK!fB83sY2{|+yU593!W%9N8+OuBo;T^AX8kj{<$yOM>{uxu5ltO(pRSmMRs> zx&;YsJbk1JxMScZrnOnRx2-1Fc^0ELC-Lp1NkE&k>?9|dxN|BnoQ}lL&ui^SRP$4+ za_~mLC`W!0!CBh@d>nBCPz8RixyumX&kH?%6p%ww=ucdRHSWeAS z2rdFrFJq-%VnkokOslGFyk*(6V%`7(@lB>jnG3Qbg5D!R4|HX~9ghSiYWp}w!ehq0%JvlL6=?p?qP=w>4 z4ZiF=V>H^+KP-p1Ajh)hBiG$IzySu7jF0?$idMsKKJ<%H;RchQN3b`^7 zwHYI?G1KtFWe9%xJTYf`IE#v8j|AJpu(Qz+kF@EMJ-O@q^A;!7q(`sm{xq}z%8>tx|>~t@?E=%LabDM_V ztAGR1i0zN-ZMs8e`e_!F4bDByX#?R7a{i z7MRZaVJS+5LAqfR#VX06*nsWVho$Q3?kFk4_!2pHhIIexkL5$7E{}0I8tYsuzUQUuNXc=XyzT-BbTVr*%9luFF(P8z9 zqUdz#p#a!fADiMU?`Y0tRXo=(JR>nS`bXguYA5X{D=8Sz+HTYg-5LrBglAn_CqSbfdA zCz{r|N=AH-r-^hdhW5~ibjyeO04*1qZQ^m`fiO;%Z#+4M2Mq_vRki{CySWDm#!Lj| zg-#22O^PbbwI;(IpXjJ4hIBO+bE&C1=QZ~~1eJ&3kDA?&t}73q7v%b;Cu$MnIt1B+ zOL|wID~e&MNILLk2NnqNE2s{rrN|>aC<}NAsTQJ(e*eaYKO z|EexiJjXx^yS$)rrr<#Dld5se5XWp;JuX`VswRCV`(oHRTawU=OitAnQAr2S9M7Jz zO2J`tVhc5^ARXdC$=$ifj_-9w7FD_WbH7Sf@dxp@b=7>z$KiP2mn*v##q_O1%DVQY z$9{<>F0fkq(Do^^?ty2mwN#~-zS>d z)+IB?1&`9`DtKa1oiUb}o2W{5?D(IglZGQCVlP{etOOPJpXCV)`vs`OQ)~jo+?e z1kDHDtC=V*q0#;5ptg++#ci)<4MF(=+3oBqJ}<`k`985nKGP8C2eOq1>st8i-Bat* z3O4tg{jbV8w>9RPK8-M>s*WhpOu5ZPy^n%HfK>)gQdvA>jmZN zVY+o*G#$$XXMEqyMJDKBg3DLW#CYv%r4Q%ez_G!Gg?-To(Y4Y@;ObRsQDTyL-W4(& 
zND#bJo%m?7nAZ*J@5nMEoDsY~t0tAygOM0!&3)U01JrPULVESZPUq<+!aYnx%m}bb z(mf768oOOZfa!UNxzM|#w1U?7Qch>tdYgfO)cRm7@u;x#;QU8p^E{T;H z`xM^&6CHH3)fQ9eV6VsxRwcow4hiY4IPQCpj0Q#&Y85 z2Rs#?>rhUFVEjBmohnKWw`#MNtT$KKD*M=Dc)Bwwp}~gc8&8aydgWbXDL-~mY!#0~ zgE{87mr#=)K;`SlbxcHN$+bTx6_LMHnmVHc?R=VlRQy3>_1zO5=71OCm*P38r%le& z)HG0RZXQY}KfhT1E`gl94zHW7S|i=Ze{^o|aT&sPrvGg_kHKOc7yiNRs|0_E1%Cl2 zmKVleXc=?(6LN7NL2FFUiN*zd6-z3uuqF)Wn(|z`urgG_s2#yHjZ~Y@dF;9*pv+VA zF?Rh!)g3naaFvP)uF3US2$OMfa$}`&41ERkG5ZR@4xuAf#M69QhsLtP zfstJxpyN^LlIa&K(XB7BE3s?D`GfSXTa}6U{r2s|HSEkxGP#!ORdofbA(NlPdDXa9 zGW7i~FN4Cb0&EN)O0A=7Gl!?K!=ce)6!!gnz6NbVRZhNmPikCFC}VTyHj3i2jf5f5 z@oT=&6LjRV6uO=4zWTNW4522X@QnH#6F;S)wV%WBzv7+tJR{n(q68h^XJsat&5C%48QqL%F)NSYkOYiaG&(ov0@ z?NA03bkWbg*CApCmTy{?U5|+<0;x(Gf|n0PSkL;rVHho@<|Ix#pS3q~@_vC&oJL=M zba|@VIb~U0Ga}gl>Urn#N#zIeOS-lJ*{%wfYZ9Sfq^!r~Vo8!3D&9X@-No?G_|ff_ ziowa*(Gco-j))l|B&v!n7V&A}mL!^K@4?UKl**DnvC|bk-s*iGflWWzN);tXjkRfe z*z$Y|#9@mvSA!M=LwK21|keC|FThw;?*IJ6)97wbeNbq%$S zD!D3fYv!b^lb+TZo2)&nu`TQ%dBe*0X2aj?kw^HeGL%~Kn%gy@&UdR>3wYL?tQ!IM zxbA_bZ0+)C=qe0GsEI4Ytf$1dCm%DNsz~;sY9X5BMK4o?$ew)ru2t&9YBp^um&}sA zr4l^F-p)ebg7+?F_sNdAxRUEVB3ct}z`01MHO2VAi}Y>&2J%4dw)Qs3p^c`I6ZK5* zy_|+oH$}+)$S6~^Sq7Pe>iUh5hH)-IWAs2TNyf!qGc*4~f!vxeuwv+! z1$!b5v!V<99gH0d+2SLN4rY%HvLz@!0Nq6Nqor{>gXOGL6z`=DWTl@PdOlqfV5XWf z=XlmWW$;cM+TWb_gp9ld4@ZmAcaO=oo|rc;c@Mj|Hn)Eg<{FW;~>`j4mO7BR){Bgn zIG_(nTx*)&1mUYbK1y5qh&rbxS1b;3N&Ly|K$$TYIh`PUZIGoA4`#fp+M-`;jKG2( zogp7S3+GFs^tj`!^iI)O=u^`R`f`ek-KB+0U2|NG4L01ndD+oSd~!As)FSlbPQQ~| zGCJq=uLw?juP#{AzyfYYcnBH(ppk@XmVrl`8czHP8e>j9okzAOpgd-!A?Tqm%iQjk zrQ|#{dCD2&p)*Hi#;qc3XgDZokNl{y-y|tljKYT2=2{z>ckxCCoHm){?ul>WM8zry z{2WAqumqG=ko7xUlM&&t4?$Di^M%=gHP5NSqM((_oY=L24I5?a8gqk{#TJpRS{YYz(r7uFlx$osb{ zNO$^=;lMzIQN1i($I)W-r{J$Xtj8wDd-nDDW^AHPrcu%wHC?boPLcZj%1M>5@I+0H zDc=tn^IV^e`~Af`(+z`eV;(AP(Qi7!lo)$7wq`PD z++Ba3ha8P9$uQvEQn<(`&dmJ;x~_>N%u0446BPl?EzgLjG*b2n?vB<1GA3xF1q^u^ zRp0z8Zm_DJ6vSfdMt*V>3(gD7()NYK9DKfao1;PVI-(MCF;xlUNFInw7%$%5sD8H; z-AJL8(eP{eNvY>^t4XPvBe%OZFj|Dlcj-ig%jT=K&=IlY1cV~FMV|-#EP=EiZ65P| z^+kAC$_VcyA3>!5u;ITe{1`p*z!k{~U)FHB&uh%prRdcU!Y8P_qt`Q>6^mQbNk?iC zXO+z#auOXXgzBn&AMZ|5rs7oRN8}E@Re{`o(&u!;a64x>m@TN)oL2q*E5ab(WpGe+Zu3)>nXAEmbr^5P;f$7-Q zCHTJ7M!!0%q|#iQ$7Ihwp((J^ z=|>yC+I+7lwlPUv#NKAEEb$88ghw3L#Gm+4k`{&#-B>|B29nkT<*@A}N9Vu^q;U3b zj1nd0g@^ao=ph(F=I8cA!LcWNu!oO9CPvg*Ap5i8IY~|?r-Y9%N%z*>7usHDr3oMp z1TlyL8`EmU1xn2cwNs{D8Jm&|Jx&_pw;Gx-Sz0hZ$RD>z@UOMIi1d%rfL-HChmYy7 zw-_H)gATLDj-y+%OZrkma@RiL$OKJw_&KWDC^`xp~RI*5PNyiN=0Jh)2?#M?DR?r+e!X6ed^hDl|ZpU zALC~>LO)-OU;_u{f@L8TWCvj5{qkD$h=pnCdv z1^J?0WF>S!6vCt)`{5ge$I#HWt5N{kQ4v*Ll^j38i7#a%to2E~OnRwwislv7YxLS) zDRHQeq*<&(s^}@jsMml=iNwcBLiRqz5e?jAELGOoKN^a(cliS6FA6IC^-3qg*GEM5 zt-k(NP;C}mpIHJX7NdxCbP?oW1O7=vSwAo|TM)Zg)}9|me^PjV>_g8eYLjhB=@ERe z@jN!Zj*%#AZjwUyeY_fd^6;zt`}G{7c7F)PK-ahfv|K%fW1vY<6NB~cb|(E!;^=I~ zg0tn3((PMt_QH0V)(r6(ld=e3*ZL+AJq@m1nInO+xib$fBbB1#t?}-uJfoQTJlzI9 z*YS-}t4k4j<=ST3>RyrK2>zYQFOOekcWkhMnLycsh16Z`@SB-dtF?F?56`0BncmNA zzF&7{fIyD5qRbu`ckr9WwlrVsyP0+`oh`RU8^-|g(2G##dr5XPUkUDD>UHzTkT=_s zCbD_kIx0uE{d$C-$K8V-0I;URMlYFt|3|@s$Fj6Sk>qf|Aq@_gWWtC+F~xLb^g6Js z^%J2(+63Ts0_6+imCYJvCNky%9Qd3M($f>^%_A0YwPX;84{kuYcbW?*DaD9?dbHh# zxtXiHG1yNbgR!F{-AhY2sNztxJ>D1eLcYgMQnletn0Q*E%E2g-O`r6t{@yjy+j*&m zSkGYgnWvr^E1=Ju^A&8^G>`MJo0?YoTtgeM(nBenI47{R`l5nJ*ieYf3?)kEyQNqg z%$pBU9W{Nc!UWFBc{(1fnYINF-L+YksuzCpBcftsiHl0K?4i)VA&HV~nnl)n@;1y< z4C@#UQ0`{>TJR=AUhq{BY!UB)PqQ!MW=@<|u0fKd<>IX^gVc6U8P4)YHy9#l69b z=LOp#%6V-wcZc~=g0!qQKNSur3V9oR^T&<2;?=V9#78L7B8qmgk~+kZ`ETn_wmlV1 z*^GQ_ZEj12u5#{)TGVTVAdam*=X_kOqsRZqQ5o{=qxRGC!Y0mKG{fWuC!PH~YL1)? 
zSIQD3&1zcRe6#E%g+7zS@lPCzEuuRJVOA7mHk|BX@DAcQj>4L#>>HSA)?F>OrvSk&u#r{QVHpT_Z1>lD1UT$ zAHm&urVB*~D_(>>jt`pUpGTPE+^$9myfL}*Er@KC{+r)H+<(@tv9%}MnA%g6(}-*JIwDc)hd*H6wVYcoZGnTi_Ao;qCV zo`*@VxkRjdo(Q$K=Y&v@XVO&VZOS2^6RX|oPKrACf;x%~-qbwLA;c-a+H3j{Yo(^Z z;v{@(Kr(Y|!ck#I&NUEKni@zioU!$uIFtK+(3|9_dLxGl9l)UK7)F*7wAzW7Z^+}Tnm&L zTqUMoC4CbeI}9fjZ&#HTcM&F^_Yd1Vt>|ZONqrWS$trI)&oYGR*IXI|qArWbUQcfNwt8{SLJJF8?7SE5y}s8>-tF4x zYU}9N2&tJm;ApVyDCsM3Da}xGkU;gCI%dzEc~1q+aK#cW+rSo_^mj7(;SEBpw-ccA zN_$?G+_M4JG!oIvyU5hku6qF$!3#-5%dr-p+;f#H$~ zPC8vxE5S{2Gx-p-+opmoqpR1wcw3MvPW;DVXfNgoT8&H#8s%Or#~ZQmS6AqHDn-0C z%ywi?7Jkh#vR^%)z*g|YU6e-@jCP>#9o67pL7s~r-hzPmjfRMcwURsfOj?%2h zT+)vq!&Aj~S5B4DLzEDUIL(;M{A!Mu$z^TD9!;A^ zIfM>Rqq>?id}(xX$>@eKUD^a2 zjmfB&#EG}tLl(1R^4#mjI4|rgfhD<;>T7oD-0_K&7nr5ZtS8^8tn1xg08|v`75YBZ z-&Be13`?iWB167_+(9 zzk~VzCj~EvQ1Flba|OS(f&tNZv^j;8>!Ge-cA^%C}GAR?;L|4Z%oC7&a?S$CYBZk(6>n`Wp;s4mYQnOkK#E~ za=U6_(FZ*@2aM8D-Xu^bjAYSt3j=rZux<|j?#ikVkL~gR1HYtUxpPeN|4qw{@I z6A@^}ICh<96_EjfnNLo=#X_5M1Us+Z3+KQ1re~$@edma>HilnB9P+ZynChXO85o)h zdjtpYK;PZ8;6PUtBBVR5I!`XT@S{(!W8$DT%fwcE=2v^p<2y(29g7qt?J3nH95|u} z^{2uJd2vDC{r>kfqUv971#>r(3r!zM%;dU1L4Kx&KMWarf!5dZVukQBCwVE3L*onBeuY`=)uETEQ$w7(E?cL=Y%JjV058i=gM& zHBlfPH5XSO4n!8o4ctZT!-2!2VmIQSnEZ2nNi?O;mBfEhRn-?s7K`u&=qc9ZMjG($ zV^bx3Wi0=hKu;G+tbaAr%Luc{puN1(!%qz6k=_!qyOOfkb9#x#{wu*0B8L4@xnjqd-6T@b6ESSLMnln!c>$ zmY$bKKjY-=m_~(uksjqp+D>Ps`5zZ5Op#|yP(kbWCb`8HLxf8$-zs*Ul&c={&DF=n zFuzj#ISM1LZ?&o6TZVpQETqppI67tQYIhBUT_VhqPu}Mv!4lhD?QZu?h-go(xuh|| zCi(9spHt{h^?jI>&?8yvAW*lY}q62m(S86tDj5Qm`6_3STPY=p#; zjxW%dU%JoqR1S~qlRisg5t(|l`(ljSY+#-e8t$vIXExqF8P~3af1%~;9p{${tsZcf zbjnKHTdL{v88Bget-cA#Y~A;+w?LzId|t7CXf`ubhPV_Pqx*Q2}eR3^^3ruIIWNn^iff zx}vw!av}1$=aW&x_RrJv;}Xp+I%*9T77&F%Pqyl7c|Gxkvbb+)&K1=%yr7?$$p&m@ zdp46xDws#+hqfwpQ&Ux*O1`P${2j3F$)g|@9PWHUvc{&I^)=lr*f8p~s&|GzHK*&` z>_gEC)@GzY19Rs9-GLG*y&WwK+|QWbF-qULN}o_br)~YUZMx^k&L&q7+u~$$zNWQi zwYb=W5SIVPlALvq#UDT7jU5ItcbGb|zd2RKEfbhvN8>zY18@KVnfF>xlMkMW_>DPK za*x>Hli?J+F0pQ+9&?!9zLktB0g%d^7=Ef!8{5-wypqvW%;pj^>5JW8O!j}ib zCRK*?ko#=bc{4?P&oAAEs~p)ggacb`5V!iJ27^(m+ekNn$T{n{HAP7n8{>CPCs#S$ zL!9f+-Hwa2xkGA}o8v_8Pde8P&u-%$xQ_EQUUL^pn&nG}Z9`syoDfA9A=RD22odb$ z7^wROtf?ApBmWuGYA|XF)h_#M2 zXNIW71PWST8ppWBTfh0IBH%KlK*+6b9HMF9`Ipo&FlRGXEsCU5|2ew=Ay_ZAUbrlv+>&$ATtmQh zk1_RbPwcz}l^n*Q6C3Bk-8KF`thF%#6lHV@IrtoB)(sNSF{5qo3N;wN?tI)I;`m)) zDU{}w1a9eu&Al5eA#i6e>niwwHT-P*C70H_AvjQzU@oNnKah&r?hb7E*nFe0&6!f? 
znVNVC;#tGlFYDUI0&!sH8O#h|!O1Pg*&_L|Lygn>-PhR;61@uj6^t4@b%E2ZwUT=n zBSKtW1WR$r5)!tFk><8O`YeaE?4v&(Foyc^WMdW6T;Nn&JND(pNsKO}G1E$QS zN;Pg_6~~W!g73uWHM>SrH40->tqT7d1c8`KlM|<#2(zN<#iYORr`D(-Bf5PhJS`sJ zUYN1wmNv0@3C{ASgm~@bVS@$>3gN&^B<&5ilS%y@i`U8`&dtsd=w}lo_PFd=U=k7g zFGl`jxBZjwW^UJfJfZPib>u?5^y#;D0w?x<-|X(0HG3YLMR2yjL|ycXo$0P_*GCyt zl%-lFNawMlG>WnO%4Dx$j~Fpj|2!V%!?By3x9Yf^zGHBlXYy1`@MBCkQ|{-qRgVXU zH-=oED-`}2VZ!5Ue){Se)q^ZHqRV=g5&w#v((wsmZ(!k5>K zR9SfVk{|)I?HC_iCCzCcPHb6;j6)(UTO6&-Mr6$(TNH%>>*j*8MTa-w04FZxZ7gM~ zJk@U^?un!3178^70MX_FEU9*%y9$^p$P`X}A69qcYnY`L-FOuq6C^*0XX~ ztuacwuz<3HhmpxK*WEy9I_g2|<1_MnjJe@jPqG}Iz!|*`7YvCRT5F{oq@C=&*K}4} zkvKOft8HUp-iQ>GIq1x3#=xws*X^41#_q2ciuwQKEdFDTN&vyQ4@l%VWQB|h<6`kf==EKU2_kEVDA+wSfsR^VNyFsdqrkeMZ~$?J96Q+W)ZN_wv&u4mlc7y; zwrpPDc8B)rMhjydLyhv1v3Qn@mK_-i!%%jAUb^?(p5v|V*goB(etLIyu_uf9tw*q{d2(8Zes{0?xzo3YXGh%` z)8YG`>}#i5(kHthKGi|IUvOZpDY|rUCY16N3%4>9szj5+{nYDNT($k=?hregAYTFb zXLzGG8TP1$rSs%XNE*yKhvy6qT%X-5kUSMvfwAqh7|Cr};-GM>=QUALc@NIVHu3f= z+r598+FMYpZ{IB<-Q$sb8B+C1q*?kD@m8gIuFmc6Fz;tbGiJdmlUvHRsXe>$PMeCM z&|a)311L)+(xmhRqhv=LrY{v#kXohr!^6Y^jY!KqWE1?x6|AgxUWR?^eAgTnDt+(hkbYK_*I-_Aetwa$rsp2Y?mpGMrWp zMHOAyvw{R72rVJj-g@RH;N9opJFYs3pBA30;r;dTEFg*!Yb}*>W$$%#t0sLu^V@gu zNgoX6by>!1EXqM>$v^Hx$p4@!=AQ-h{-eKp-tT^ImgJO_I=6h4&0kfYPrF%}iQip; zxcpG@%`t0KlVcnD85a>gp#77|;}F+<`Ay!paR1(`%J9~W zBarGRCn`PDYw(O{Dc~AG(79!?4$a&CLDRep;aiWwSD#Wp0$4R)k82E}< z!jYs1_DuTz@>#V1YvoQ8w>dqSKlob`Mdutz+gJvI`9i@5l)BQ>2!kdBs?usP&Mk6) z0tmNL}#-Xm!sgswp2Usb1`Re{55tJaq1-d{L!@jAxu)2gZ) zhBsf7kG7o^_8Jhfj~gizNLK7wYdLb^73(NSo!iTB7&lJltkslcj89}MNt($Ew;|m& zX$DSYFjPIuq|&n;chvlOGvA$e!srizbvOSmoOZ3dBS#cY^S(yhfusCu<#cc6mv0WP z|EYs|lJkLd^Ib~dv}qC!e&Vi%r2nQ)0vRJ%s9o1bVMFj)iw8W7nvrF8`c5Gy? zN1|?eSQW#{^q}p@!Ia;g_NT@xwA5IKSaol~t`rrOa%>8$$9pRE34ma>`s7`6@yR!b zuwenMld%$(UK!lN_l*^%yKZBGQ(7;Cqtk55~ECqDNAnLj>o4ceSsQpxYz9@sBy>(HPq1ovLlJ zClZfrAVDr>y~NS)(#JiO!-?hWqf}>Xs2z)2fXmWUx!%bN>Ol*>Y}an}fd0TqiNipA z6-7gqS$1KpxNnz5-|~YSm}l8$IcUW)zvVx%)o9cS*CwwhbqnO7ivJLvEm~O`C~?Ch zrD(DiA6tQ_77RcX3%=vA)r~ihmR?HJI#lTW@j+IfB2BTi@`1agqld?#dXN6*_pBnK zF*ZV-cj0?--vppxI@t>4D;Q1Yv4}fcn(PA*`(DwC^c$K`76aYy^~7%nRFe;6sM(Jf z%8AsxOPq2~+t%7;{<;YhF&Qqj$aZK*B2XJ$KciKubx-3~T2c&J-h#SK ztF9M17Cw2Jk8|t>U<9~(oWz5~;)XQ+AzB}hWw#<+BEpk!NyyVL+ynJpFbDgb<5 zIuwlxQ4^sh!GXu)9g}SNT;c&m9;M?l`{EwO3ka_qZNb4W+l2^;E-ltC*O7v)wM|DP z*Fk=R;#DjUkG@6XA@%N*-_@bqyZDAsRpqb$=1X(SDU4{tPsuj zhc zx{|*Wfdg`|bT|-(fDy%e$B>K2F#Y?erl0vZ;41}*CkU?>mlF;E2nC5Hy-Iv^cn^4Y z{#lJhEHL;7fzC6e2}Gg-@%kbdfh8W|nkS0dUtjiDFu~%#!62@X{uK<-(e^zc_!p!Q zVE%%X9qh&ZzrX3x(egd^oE8j82@&m90BjH|XI-sF++qE*W)H-gX^&#AykJrPHfOdl zj9+?{==Tc$0{SoH?K}hix;jW88w%q;t`LFb->+`w512oQ3jRU#HyFL%qSA0g(fn_U znw)8)oBG?y1iO7dgAPE7?QV4OaeiWTXFi>jrN(RogD0Bo^UMGa^_ z9JzCHXU_f3JoEkd?wzLBL+{;t?_Rams(P#5s`W7SumpN6B_=5bLO?(O832Evhgr~D z5IQO<8Y&7p8X6h~20A7-!6R%eENqe|__zcVq+m)4QgU)C8YWsQ>Q~Rn$?3T0Uoo?= zv$KO~c?7sw`I*?*S${tS0RsaA8w;EG(IaBkXXMXV|HrR~77!jf!V;o45&|^{5f1?g z58_*4`ILy5g!IKrS~_|Lb`DN1ZXVvZ z??lDKB_ySklvPyK)IVtG8yFfHo0yu}J2*NyySTdfefAId@-;9hGAbGt6B`$wkdc{{ zos*lFU-11$MP*fWO>JG<&-RYauI`@RkFi_5F) zo8NLFfRO%8tpAkk54rFFxe$?&k&w}T%Y}gG3^XJ>WRzzts856y(DZEasabu{2}B~& z%Udy?vnd`CezF_Jd`iQ<{NnhxXn&FHe@!soe@L?b6zu<&YX*dkgaAw)5*`Qwy1b#w z@W%XC`_~x!YYqIbuK~tYZv?#>{gm2lL$n9b_+5c((7Iq}MibA;iR~hJ0-D zfL#@Iurqxt_;i)+iT2JK2W#_6KPPP_jky<;h=w(ol=_urIkcV=jC_H(fb%n!^$&R& zS9Ne_9za z$o&ZSpZb$Ok96+7rcgeB$_%UFK^k)B`qglJMQNS zcGyVhszj=p4>lR6KYWNO*ilAUTQL-J_bS-Cq`~XTF@D{gSuD6+F zl@P~73~S%KroY=QM4~VU|EyYmP{+jrozJ%o4w?!;7)yYP7GJJ(T{< zH!W%$C=Ibn=xHJ&oHm(FsL*W9iG!x;BCWxs{BVE4pFTI3i9by=H8lyhzm7LGqz-vi 
zWbKuh#1y<#>Kv{W7b(J}3dU1N*eyMaEA?@p<_kM?vM#-vd}C|yP=4pl}F z9q5ncT-^KfI2V}xXTN8E5FTWfcP9b7uXcjyN)66^gXq!_|A!5?+=J)=L{0mpd5^^M z0hAF_&4}9yJ)w4ShquNo-q4%dHJEzHoV=(9)GJF^lt3Qg?=-Ylp^?qegl8wej<1_- z%CN=B)r4*cBP_Dp3k0}1m?W)~KdqM!29HE_m}zbW$KA5G2`{?Lh_A`2k;*QE9ihAg zQ8Zlm!}oAcW+==em*@wpP2`r5QRSH-NRqY&F<&s*Bh_ud_5ozk?;uF=U7w@KPOOFP zP*+^9=N^S~=tce(=VLN=?#xckJe_uI@FaUe_TW@SVPdHN^G|{`tLGgbMW3O%3cxD| zTo$%|W)^{#`l*O7$Wtc?F+v!B^p}9qJ!Ku;_z@SSs^71L={UtJKvn7Z^Vf{{( z;<6>S3866C+?vJH_V-Uk$-&n8`yV!g&ZhK2P38?nT?SFq4M;aD6iGGjzSIm;(TRh93+t@od3pU=q4Ttxn~``xun88G({4Or8~j!80o0YW zntb=P z_V`dgGP;4XjOF{RBrfgd0LCP9hpHImumfuvT&Y0OXSue;5kr%-#kHx*5i*Y$+6hr{ znBU8t-=jFnMw&;BsiaLBy=qle#>V1A6N1DQ(;diMuyuntXFq<{yPu$?gk@4TM40A% zX08z;2%vCX+uT;@_Q>lke0d;5ttcd5|G`+UW)PL_0fbocP3&Vl=R}t&dFbssgS&R! z&57Cv5ZTt1p^NZE=C?q^h|1`bN4gia-}khFn8U*q>3@cGW?9*wklC0s#u)GL z3gi@1GCglk^Z1^N4QY1=kJNa6{CSz6sx|qmdhkiw>hT%Jwgj{1b`cjNs(i~ivH`f* z{Cz|EmLab~#2Pp}EqKzt*oBiUe3I8{%8)-!@|tYR-ILdoF|h7O?f1A2d;Ip@G3E~E zwk)Fx?p%zw4xYwGI&ev*FApHV*q?d87ABF7KEfqm{^yBB)d0QU7GB+p{Q(q#-wwIJ z21ajK_9N`es@ydm|Jw(UZeQv>!MMUz1n@o{Cs5Gt*+n5x5c;1s@TaRc@ON^!&?!~` z0!3e2se*kUKx^-gF7N$bH=ij^thvKC2Dx7s^FNFfKFj1=~ zpYC?ij~h%syEBJc+Ja!Zy%JPwcv_-Wjh`-dtbz*#1ue^caho+dv$VGslE#brK{(Kx zvm8M-iE5`Wpo(sZ?s2JTpR6e*}wxrW6HqdifJ8XM!1$ZxlBvLH< zQUn$J!F`YrN5$_MC#Rn^aR?|GpL7n*nsUapV0S&q->e}YTQT=XM*YMR>RLozG_6@j zz2Cp^db+RP*qa1#^Q?&0vb~%pp`1QJskW~=$6fgm*#pP{uEGlIgaz%*xledy>QkIB zZA>$eXZj>TzZMg{4PIK^mWT1=5*a%Pys{^H>efKxtseo}VdlPPsDfoD=G@A*+z;5L zJ%C6aK$tsOq7ff-U{q`FN_s(WI8;TRGP6~W;j~qUh@FX9VVL!?){J@x2uv_ z)iyG7Wu%xY=F6g(=B?qkmo zBaKeDg}-uSo(ot;wIqtoz!&1QN}cXIJgNZ0Ckv;j<(}Vyzzm6c8V zlgp}+luv37gjWmav7>i^GXn%$T%K1Ag4l2FM9Rf53?s)~r84leL)+AH{?h8cTNufg zGIh_$TlcQU%0aQoj^KC*dt8LIeX~)zX3h3cK5wqKDW@hRFr%*_t^B!%EdvpiO;SJ4OteqwqxFMXL(FaG(>Dd`>fqM6L?lIXZ-=)(603@N>8V(+K05pv1vsi zU$#oFruqAtL6Y506Y}1y`?2e=<$im&=9&F7$52A&P>YH^B0e{~K?M!PkOkj@z0jM3 z>k@5~)iOf`pE}3eW;!t44Nk-43!`1i#zG-|z32Wj?I31IXU80zFc|cVa{}Q$Rc2)mI zLt09?r=zfy3^jF=NBS~SH2w60OdOwD@Wn>qcp-fz*;4ppn&;-Y%g-UJiWl@&j=U;+ zthCdTbSwP0woxO+?41j6k{9a{uFtQ&aF^(1M$pRR&?ucmEL3TT#rp|z>EbZ*SXknx zH&#}=>$?(Tg~dnPM!tvejtn7TNopnX=pqQnQS8PA7@P3>z9+u94cc>v3wCpZR!Bqr z?I&UahD57wi?3Bog)!eRhe=f!JAUg!21)Msv%2RvsIYW(IgxPmr@KWz_Sb#StIxeH zO%p;>{ERAUa8;40TQ{FHY7WOY&EPqCm>bOvf`$bf`Dti;!7oBLY)|gOy}Ls6A-pZ# zu{YGlw!Zvu6%#Rsq700$Jx*jq$j%&`sfHX&-a@`y&!P>l`QEc#1$z;q%shvor;}GL z`?M7G3%WWKZWS8}YDnB>raB=upY=O0E3XnM@{zm5J{==5mmjuRtq&h0nkuz~+Z_kv z@F16p2y`++#DcRU>pizIoCIrmilO9Y7dQFU)sK59(=lC@x{E+Oa4=N!c3fRvoN|1L zt|d1xpX7z1@tBK=B$u<@-Ydd@^|{=Y7aCWTYjxM?L&34hBK^I8doOErNX z8shSiig@}FpU4p#@Zp4w=!*+Hg?)P|U(y+JLJ&)sxf~uqE)yY!F1D}=tWRGRye#N< z1obs4x4R5%1=@n>3LgJgHBnvvQ;(FrXn?neqbPyecemER6XGLC;m~!E$%3Rpl<50a zwHX}Phm*pK7|;j}&hp6R+g=3(Jt7D}{3`-hX8DbjcePtJS}4 zd2GC^^y}MNMDls|bB;2HIGEh8+k%%PP4fEsskY**IAYSKJhjy*iI?S`hqk*K3W8_z zi|K^E<%9Y*FQ#GSoIKsdw>F{atG6X6#_Xo3{bbz1QB+6?2h@mI4yx8QNl`bG^qHO7 zN1WW<_l!jmgyUn&(IHusD>Vg;@8gQG*T(lYk)Bu(gFW<%*>_a?!y{3to`|RA&$JEM zuZa#4cc{d4j;k88JXNe=V09rsRyqjCdQ;HqtEytPX)?foUnnHbns)>ES;hOWbLT`8 z-!)(hin*VqpCu}t#MKCN+N+po|I!HWFVx7=^E;4lWo@d&8-JY)BRu)#f&^JsYD%=S z@g>89(9|&*-S7yV2DNj~-Jzg`@WsbcYg>hMqlm8@ zrkHgWR`ZXH`XAHstYmkZF=vyRJHj2Q&pO`H2{D*Zo93t>aZt{wZiHAMlEsqaq>gx7 zj6B`(#j=}K5j^g)hs$lM(fI5uB}p{$g3MX6=)An_Co1=>{QGMIgox9YQtTMr2nGAN z3v%WM>VKA$rhjDhMVN2>tv=btCbnN4Tpoi!uUr12PHqv?#J<2= zV<9*6g?43kroTC}uh2abdw?@bD1ba+fIJc8%?M(g766FYKdrw!n-GX3w?MIAthZuM zI=m&CAioTSIYOAofO{iL!<^Do)i z=G1D?|9u zEYBXLik0wmoLZP<6`6i}>gXZ;lwkEm-RWD`iKFnBUocsG#%>)hi!dA$aWHB{#MAp# z;1eUGWa_rTdcy5i3!t#aT?3wS?8hv^>=e9A$pC7n)$0s6u<>@eNX!_Xml3>;UCIuG5 
zQSX3eSkZC|f)uNHx{hZuf25;ZegL`I;Rpy7tMCZ=Zc679p8n{kii^H(cKY}{wC%g` zJ7}uyTtfhbsKbd%m2}wE&2U8zbmf?0naxbs05Wmpqu+1pxcSb5f)R$WaodW-1fe5kQS}xzg*4xevDd;{RC4&9W9`a+ z#P+!!ov)Uy2=?jt)HPOaJxReZl0Qg@tWQi`oY!wKT|n?y&D;ywDI2S@%yu;IP(tlA zx0CI&RdQJNvpq>`EKSW=;KWvY!+ON>K5F{ zk&xNXm_QdcAS(sK3`#~pSESij;K`RmOtTkep%yTuV^`$(>nyTh#~+E4A_&R@s5@}^ z@D}wWbSt~K_c;5QtvhB9(4C0?j?4hlSfEvsiMc@;T#*EXvcU@1lk^kf-t?Tut@({E zsNQI~+d>E3nEQ%dWhHjMgx&M48Y3cKTGZ36;hJq5A@$lVGVJrbtU2R*s~fsfne#Pq zYJ4P!E8faW7iVp74Vt^I@ref9Qd{CK8~wW7LJpzKD-~X_{az(rPK6udFLP!f){hsh zFr`p|Fp?_VA^gtej9?Bwlzwv}&SfJ3pR_OdRK7bShXY@CJmUkI=g-i_0xV^HS5iv9 zH>ug`hAUa4tEWr`RzJ}p@$hKLY0bRX78dWAgBV;Ua|xdWm4jbT2CW(Eh^#mF0#|BQ z$5w#Dl%wDo^{p9AiHz|PhoYw#YmLcbstkBn0nuyV?g2!o7|SOslKZ5{&HD57 z$Df?RwXKVj_ZP(lKaZeyK~*|Oko!l4c8$F{Kf^NkYwu)$$s7v*BNG1~WKp6K-4t5n8zxT90Y|^k zMwU|^@kSIAofKV0#Q&xcWbMSpEv#jTEY;dAoDR}1w<+|Mcg?ExtlyrZiY#I@tk)OS z&Gzg8w5yykN13;H*^>R#ysL+HiQa^dz&ic3$S{!GNSiSx;NvrM)_(koI}6<@q{-)v z&R|2O)+hG>|2k1%y}U2r^8k8l-I9>kPi-jM7|JbjY~d94NrKgH5)7}~*4KaCpoCvj zx0d9LhIZ{$`B5ElR=!H?3HoR80HpM#)uVj&=_l@z;g51?anOEXgU8-%s=QYDRqLu1 zcq%SDk{rbH!3QiD-NvRAu`gqID2sqV&`G>e>CjKTs+yeX2YU0WD=o)thW|#{$M7ek zgPvmH=;np<*yPT+7Q`&Jey*2?x^&y*DM;OJ+IsK-R4iYW>~zsj-TF+nzFI%nZ6c)M z<6{sr8e*~}qk4jZbFH3QYPKxqdbvbl#G%qGZc@|`=7#ax7t5(z3?{djA*eRC{c>Q6 z0usrmpGa)?Rblq6yYg){id~KBVI{IgeeaIm3~;YhTf|zpH#c?|%%nSY@|evit#?YS z>bBfmEp$Q2p<0Ma6RZy(8aULu{`>JMpW*%N)HaDO0}k%cY+XW(zL??>w4!$lGpj$B)7|Zdue~& zdautH9H?TG{tfi@EAIJE=#k|+!dOMvCd&w+ACG8eDAP^84{m%IWPLZXKci-QsN_P{ zfp=xL;&PzxTpj280aYquf1%W6w3GQNhP%ijVuH}~@OcTw^EAYin3o3^0!*Vn>kLbx zBSbB+-PH={$nVX4V_Q$JdRce}o>e`mkwmn5?JMSX)?HHFs%q`N{LQ(kE^m6l znE3L}Tx4cys^>oTQ=g^PH`o*c0|r0#8p6B0HpLLJO0no;4*E9bl{4eeog3sjWa%6& z=7`akTngT;i9C=}!sutS9nQ^<_n3-PuwrW>9TuqPsYpnA_E2m;+O*@V_iM;3;Lg9( zW9g72ZUfuPF}9>J;XE=@T^+L|*h5&X8&CZ`lSJ=hotsZkUeZ*&2KKf?_jhHr3==mu zcJl~L)TCtzjs;WFNO4kM^YyDhRT!C~pkUJWz1_JJ2Zz9G!2ugIsncYQ+1`i%#^I7m zi3LP199XM4Yf)r5;ik>6ay%;zCyrlQgc__Fc0>$sNwcmyOy?1zK8$>7+O}}#Hr1vZ zOv&TfD4kQm;2rxhkz3XafYx6JEWZPne_qgX?T|lUgF>h6rqr8KFZ&XmhXKm4Dz4ki zLb0{k1m$&F5ZuL-e=mk4&r(l()_t>)Sn0C_cg=75#PjGH2x)eW#gD2oi^VsE@(D<~ z;^|wscfEPoX`OjJ_=L3sNgn$JD(~Wsh6m6BDyw~}?Y;So?v*VFG`IL`vnEgk z_t!=x_0h|K^mkQQwiRn+0dEF}j&D>t*bm<2B$O|^k;=IeO)m#YYT;rAsh^&eG_o773R(W(EcjGQ@VJGkn;Yc{J|xaM)yycau&vKgy^yLdt+w(?fsGI5Zo^+4WoR6}xPb4k@g>ls z<=+c*IIP$l%S!Jxx5h2+a@M+K_T34u+>2|u_;;%>hg2hLyPo$e``Xj;6vS5Hs}FJ% z1KE|#Per+7%1*4GWV22+iIe!UZj-0n3f#!bPclc6h}fr4l}ZVDL%SMn*@{G4Rw|xp zl_SiZm<5~2G{qGYw&isgQ)ish1l!4KRfReDjW((%grb;gyhK`pgwHKL-Xecn}pP6%7j zZpj)h|CjNw)AT*TEbvKB^PpSK6+@Q(K~P24r8xgM4-Ou59^9La4Y)&SK%`J;_y9us zH(kbRUHnf)cWO-di?dmmN_Gy!8x>UlFk66de7`gE-Q!c)*wXl}Yt>~=FyqezPTqR+ zkTiP@o1GWD9Co$~x{mq&7kpy_(Z;o><0k{o_QjSey-T%4yNQk08|7Y$hYFJJDd&%W zwvjz&_@MTxq>t#y+BgZ=Poc3>PQzzorCOj)nMdX8ro}id|JaD28PY^{@{y_tpse5i zI$HYu`JWdtv9YeAp&e^j4X5C4nReLNM?-wF@%(zG!!9*y@cBlB5O2cTNj2YSmaduL zm7vieyc>D)dFb|Z^owJPAlFaq39`r6nHbe;t7u7%gW$K@s#N2Cog;FjEfGr+zpgRm(mT<& zrC)#cS$#T4u7FBY8pDc;i!@8ieQ#EgAd^A(&LAExW*l4i7_pi^2``0bH~6$Okf|tO z@ttOrf^scp!AN_TuE}JU9}fvX^t_d#LnBE2fck?zkLo$`^QqfO#?3iNbDP)76d_eF zR85h}?6Si$4Xj8g?@r8xV^z7t)R`mRg;W}~%=K5?O)dFU-2F^VY-Udk6Oue<^H9o8 z1X!%f;?=p&7Inec2Rb^Z_vXo?2cdxlOqZ|qY<+3S_s^_!5>YC3^y*+;#Tw=H*b0GMmUGXz7(5?$?CJ4dq(eJzNbg zry{2?_~r6l6Kov^o9&f1-a=z++fBfvC(~YaQaqlo*U_2a>a`l<>LF$blW^XLoL4y( ztorv#H=elGeIq}T$iAh&o>(v{OKm6+x5{~xiE^dE zwDZ?FK}m@mW(-riy{d0&Y$$raIOe{PoLeoqc#nnsoF^j&n{tuA5Dr`LMX?D8plF33 zKs!adDvLHBNzmEUe8>>X_yPwhlM7H=%8=&9`C{*`XCEQ}jL`n%TE(upbC z51{m7$Ixk5Bbz2XT6&cFX_1N#= z)%zb0UQLj9)&`Rijrv+k}1$mNN*4bLa9$Wd=(TW^6$ zAo`N7fO_cP55g;5w^O|59zYe)!=r3$XoL8DCBYJ!<|-WN{Y=&x;F>jwqC89d3g_g9 
z4w}h}mqO0Rlzf-$$ac#}Wz$k}cgqwUSYc;ueiZk?{V4bOtisg4SOvtJ+|;VBxZ2BH ze*AR02_pCyY*8C1$I{Ik>~v1q*#O1F;R4@oLoPD(p#~~V#aW{sf z7F$c{7$%*kLtQEFRD(%9tRg$Vi}iLdv3^hT#TB8SDlW2r?2pNuhOjTKNSH zs#p_$jI>klJfTGg{)^>q5;eY!`jpJioTk2X^weo+CG|h z4(CdT+uEV9rQ*8TyHEj>-a&o;i8J>wT(d!IW}5U~h$&GyVW!0{JHx!2TBdn|Xl`(Y z=t;7o4cDv;+S_@#@-3*~_y(y<%*cv48O$K&<#Tf!F112zPcv zj zI(>Mmblr6ndWmW1smSyY<x)8vvB;B4^Z;q6yEf^Euiij2Z0|@xn>w)M}wN`cVVUvn!0|k zKN#t9PhV4-%oVVTY;KuO@x+gN&>X*A(ZMidJ*cT5+gqa*G=R)MfV>|-Z?fHT9(l|{ zUw6iPnwFT}K0TC+`UNuHe81yap89KYXoe{twKn6!PE6}(HHB9N#bX8z9OESRj7hNP zy?riO)G4CuK?c~=31%YnS@Nv>YGfs*EZdXT4n*)5laSYK2vE1MxQ(?UA0R&Qj8dQ|NX4b+VV#V zQP&BdyiaH9L=cY#+4HrqwnjmU_gs}hTx431Hzn?B11t13vK=YoJf~t?LEbA;`TP;Njn+R#gA!&XFXKyLC@qOnKU-q3|krPnP!s;u& zl7OT2jWBoS4-SD+Pwjoys-#lfJ(tfqY){xe(^xSGuq5QC?>>c-AD^+!H_by7{q`=K zbg!L~mYWgLfE2cq6Lpa_S<1TWN+W8gJ<33?HF+8;V||(cBjV{wY(Q~*49|j-HfxZL z7cB82H9++=ismBL9yX^fds+^1TY{?~rwCrT=N_Ay(C7`l)d#ArMG)&fJBE`QzCT%A zX>bymle1++Ut;01&35D7B=<4}VYWXC`1VYZ6?-&G z{{ckt=z|&<&(Rg(#G+zKt}u!g{;QG^4}C9oL#-?+_=+6@)^QDvyiei5cqKSefN|Ic z-d5^3s@JZwoa7D3Am1$Hf$K~ZO0QZ!?#6_gd~7RneLBo=e#ryyXAJJM^%=6A*^Zp< zk`8+i>Z#1#5X)=AbK|kC11~a-?TLP-+?71tK-Bl^Al&{!--#}NO9W{xjf?vmuMOr* z_>JD(54DDGhCC;kuiW=~;L3+g{2uAzRg3US=}sAQT*_R~cXl;%Tk8gPD@y+0Gk)WV z`nU#tV|Kjjm})%9uy|EgA;ES>LORfCxBzqNy_p`#PJ8O53tXjNahxNP%XfXsAmv51 z?mL_Lh|&AEDs07XFsrH!oL}Nklpvk1<^iRdP=9Qqn$7!RIAJV}nn+TrJxiq+kTs@s zPQBcNhKSsJuDQ0b^7>2%ugqF~GL12wDBcW2D7nPC%ZEK>)jw79Z@c2<$wmeG<9+3Azf;swZehGu8Gn&HIrVY zNT*sOORF_-G!^!-}?Vg zvGXVQ8}Ly~$kjf)^Cn=C(Ve@SLu0^&25fID)wX1ff3uG4-O_F&`m77=ooGW-crE zE}@r4H+EeuhSe!zuy>)7G8JR&4S$w=L|A;=Id!4H*f+FD;?Fx`F8!u1+PIuMuIdy) zi7w|5c)^0zu!W>nmcy<0W_PE+jaT%<{J5S%C(p|-qR8IGgV$#{dz!fO<@-Q}q@ZRX zR2gB;e!ZP9^gT40;fH>BCSVbFb6^VNf&gK5z9oXtIp+?LBa=CwSBg*)iBZIt07EGO zcEP_=n(~RLfJ5MQthh%}gZVk4;REwfVK_ls7rB=oFu;m>amRQN&5>&h$az5J)kFZt zyW_d@zJl+?_$I_k-Cw{`)`Dq=2+P7WQUF-XqjYZ_7-%q_s8MetYo0stKSrkQ2!>8R#hV9{j=? 
zmLC3_%l$vf)hvyE0O7-d)Saey>MBH2DVqHL@-BpX6YL7qg%-PRdVU?4|GCC}zzvj- z%M;+%OoKN^U_QEDPUPpnO+tmK4RY&n4h@`EZ=!o8+uw?kX*vrpf;Y=aVKQ3sg`q`s zLE?U{2_G6|Qg%z{%3n-e&G-M7R5qAW^wP{V{{MXQSYrc|ZuNGfHQ6 z`NlMRkQGFt7M7SJLD+(^9<(~p`4);t0<2l>M$(G~zCy_~$7fm+olvInc2l>SQDajD zz4>5*80M`xXVmI!b3PI|ReiQ6t-?DtjCCVa0h2P`FW;XpwEOe`5!V1?Vs&+tYQ3H% zvO+$DjlsJR?#WmYh|HWt2ZUqZg{?O@V+BqABN}soEj%sV+q0=7iRC9kN?`y|xU0N( zjkL+H8```oU*5@MMERw8_rpsIE(>qZ5$^3ED;*mK`*Uk}BLpk8wv!K`pLi8P4thHB z&3hlFv*Pp%GSXbp%?s1Ef_FLZJ7)$hSA}W z>D?RUMB$z3duTBkK0gi|0IE+@VHshurPt>q$F!kYpig_Q0J z4Sg*`^5woH{IcStl*6RmW`>?r#xerPcA=GDW=d6=mPU-)NDOS4O;C~s8b8|n@eX%>w&%`Vp1QMMA3OxrQQH{znFSd>t)e!ruYyPU60v=X?Vq|z!-c@ zDYd^Fex+@Hv?=ufs#?6^*)1*cFYkH{6qMvp{o{jA*!a9lhp;ecLWm4T1A9zZLm&}B`t2hi!ce%R&W9pks@zVg{YoK1?DmKoeJTzDm& z5oOICw5{#|bfrexH0PNEWGPwzJP6E$6LuD-UR{-S-R?ht+Ewf%2$tv8q$4plPub(= zSlN0GpzV{|g5+ka*&Si`7mIhI;G+`9Bxu)o*!>+8{xVe330(OFx_iLAK|(^~cE4j1 zIB4BP0*6gMfZA#&Si&kkfNw2n0i}QcJ1H{bYdd$9YJoMznecg{ z%|7SSIDXo6J68kC-G)$@CaQ;aF(Dq-yJUQ%pzZBNVGc;ywH~XTVh*g#bOsQ`37dv9}`lp*>y316Iw_TIQu4qnL-(Y&43mELt$a&*T!DYAo^|4bK7W9Jy!LOKZ3C__PPjg2~NN2hGc>yIJ<0hNEz|-}emz zM(l@%fVBnma4q#tR*lAQX7s>jxMOh6whKC1$=@-^b8lV=jEh>4)!Jr6QnzSchmNwQvi)8J|z}cN+#DpXcIUkpow< z-_cFfYXLBPISM@{xKFA_s|>RrT*P$8z9`r<>s8lBw!=8wooFPuqn%{6b6_dO<;O99+v=v`DXBYYORwOP_s)gEgKn~Td7}g860gpk6Q~s2gbn|{Kia|F&GYBK z9{hWh|9>(RHGfp(s#<@kxOPZ=WU67Y7s8R9#4Ir!sr;QvQA~)t`>dGZB)gu!#|nV7 jtp9G;$luyY@~?CMj~eSgYp(xw3=~ZLe{G#0Jxu>!9Oa91 diff --git a/jay_imagenet_for_co3d_val_0.1_images/ILSVRC2012_val_00022850.JPEG b/jay_imagenet_for_co3d_val_0.1_images/ILSVRC2012_val_00022850.JPEG deleted file mode 100644 index 8277c3b0f97accc607ef1a9a3aa09dbf5a738a24..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 15641 zcmeHt2UJwcw(TZIMWB&P6D5gCl#GC62~Ex@C{aLiww0u|1OY(_0+MqOkeoz`5+vuG zbB;~--yF}m_Z;7Qcf9}p{bT$;#%XqqrmAaKt-02$s#U8BJ&c|Pt|-eZ$paV|7{EjD z4?vFrvH(6VE*>rpJ{}$(0RcWCG2{|45fL#p8951ro(9T5PeVt?$a;gFk@-3c9UaGQ z&g-{$1q1}3>>?7veBw9w1^6x+!5| zn4}n3q!{S002BZ)a6oA<1pfVlfr*8UgNuhxKu82uD7yk+VqjrmVq@XpU}J-|{lMP= zY*HLDCSGY=a@YerW+w>W^N5f5EHcH76q-M_S^3SK{Rs%KQc_XVuw7&4;N%hz6uK=e zA}V`VPF_J#Nm)x~ncVW!0Cauiu(mTHD%t`}zk4hlWQ+XJ+T-7Z#V6S5|j+_x2AC zkB(1HFK}T1SU+L?1KBUQNI_hf*w|Ruco(=ZFg?J8MT(8X#EVNN4a0ljM9$3j93LVR z@v*p(fQ4Uko5I}r2jNv#ff=@)3uwO~`|kns{~say2e5zQ8U={4FhJvBkpfb{(J4pD zGs3^q-#Yl41AlYiZw~zb!2#|dG!WD>ee;wAKA(UFkaE!N3M391klsQ@C82?r>8^UL z)${`tP4F+X+mz>c^}CvI6m$5ou_+cBNZvvztD}Lhu!)+LX}1F~aQF2nC|}hk%_7GwYei zyp81o7Sz?F=kR5bKU%EcE*HN9cHRAZ*NGA>>p5`lv#?RbEPTenlNJ?_biyfx21pf< zJX2UCCt>iPGSlsBGBjuI)>mv~{cKbL*&_8D?kLn%H1KDsT@)&OIMD#N7=rr+2)%w< z{V&kP3H}5dYFQwgY(m0;_w!L{~Fc2n9 z&vnj=1A*PafoM~fzp)vlq%7C;azES==&d6dMG&xKMKwrpV0!Lm7ZG=>AT5#4n1OhQ&&wF=By0zHARwT1 zI;>1W8bc1E@vjnIlf;S~B&E{9n-iZS(>z48(w$ZfH;S%zKdoK1s`oMnWZg2lqv&l) zhz|@QTa?_3kXFNt;$jIBw~Ww>i`D#CV%KEZaHvf&*4m)6+z9_$`z&qSOR>B~~Ni zC5w&)!NHp@~F6lXBZ|2`AYF(PW^;bTow)7??^g_3{>hr z6+iKU{M-b3;=QoHRkd4e*OW{xd<%k8h&xs*}Kn`Nq(I z9FK!8)E}JR?`|KR2k_MIYYr}W_8!O1tGGj8a*F;4j%3XLB9R1F_cdg2j5l1aixv(E ztI=cWIe2jOfCh)w);{RYb$q;!-=tcjfD;GltWIpG$n@i5%C*>j2)h`1{F5 zeUG1xJbBF`@FWH!ji{P>pPE{7VA!zDF0GqBI=}MCSFsN6=*K$&0WH3B^kQ+D={Ci( zWNAm3BDb8VRKM0keG(<42v-FJ7yM9euXD5_ZhH0+g?bGT3(Jsq+QLqt>bL)w{pq3VDykG)%_;6A=Z zrQEnXxMCfM(0LRnX!v*a7gy|UjIaP)LG~>nE-j`Kc3e7}1Az$-RpdL5c-sNYVlh`2Mw%JM2qlJ2@Vc zq=W0TUTl5y_(@*R@f?S6-`Yu6U-Q?F(-Zkj*N2}Ss~mZWpQ0!Bd(U@G^RmbLJesJR ze3Q+3GIuUDto7w{AN%Mi-IH%SdS0>18!}g5eHfwExvI`$yWGm0$`UvFx@k0R!QW6w zdxPnNJjtwW^yUXQ^RpXF4<+ZJ3pz9r4sEMeqM9mkqfnSHEx5wLQs86ob10jK+ZW_T zUMmF+{4CJv|17@Z@CCt)L1o|%iQwZvt(Z}3~&vULet7kBnOOmFQS!#2)on!(cE 
diff --git a/jay_imagenet_for_co3d_val_0.1_images/ILSVRC2012_val_00027083.JPEG b/jay_imagenet_for_co3d_val_0.1_images/ILSVRC2012_val_00027083.JPEG
deleted file mode 100644
index 36e858d01eb970742606015c0c0665b6b216554a..0000000000000000000000000000000000000000
Binary files a/jay_imagenet_for_co3d_val_0.1_images/ILSVRC2012_val_00027083.JPEG and /dev/null differ
diff --git a/jay_imagenet_for_co3d_val_0.1_images/ILSVRC2012_val_00029849.JPEG b/jay_imagenet_for_co3d_val_0.1_images/ILSVRC2012_val_00029849.JPEG
deleted file mode 100644
index 7c6a2ee125838f04d312a75e2c551e0950209c36..0000000000000000000000000000000000000000
Binary files a/jay_imagenet_for_co3d_val_0.1_images/ILSVRC2012_val_00029849.JPEG and /dev/null differ
diff --git a/jay_imagenet_for_co3d_val_0.1_images/ILSVRC2012_val_00031840.JPEG b/jay_imagenet_for_co3d_val_0.1_images/ILSVRC2012_val_00031840.JPEG
deleted file mode 100644
index e786c15035b0419919ddfd8ab542408d2a30caf7..0000000000000000000000000000000000000000
Binary files a/jay_imagenet_for_co3d_val_0.1_images/ILSVRC2012_val_00031840.JPEG and /dev/null differ
diff --git a/jay_imagenet_for_co3d_val_0.1_images/ILSVRC2012_val_00032692.JPEG b/jay_imagenet_for_co3d_val_0.1_images/ILSVRC2012_val_00032692.JPEG
deleted file mode 100644
index 9315328b856fdb9e781d9a08288cadcf3d5ff72f..0000000000000000000000000000000000000000
Binary files a/jay_imagenet_for_co3d_val_0.1_images/ILSVRC2012_val_00032692.JPEG and /dev/null differ
diff --git a/jay_imagenet_for_co3d_val_0.1_images/ILSVRC2012_val_00039433.JPEG b/jay_imagenet_for_co3d_val_0.1_images/ILSVRC2012_val_00039433.JPEG
deleted file mode 100644
index b3f9ee06d746e9f254f7f0d8d64cdfbaaaa38f39..0000000000000000000000000000000000000000
Binary files a/jay_imagenet_for_co3d_val_0.1_images/ILSVRC2012_val_00039433.JPEG and /dev/null differ
diff --git a/jay_imagenet_for_co3d_val_0.1_images/ILSVRC2012_val_00042007.JPEG b/jay_imagenet_for_co3d_val_0.1_images/ILSVRC2012_val_00042007.JPEG
deleted file mode 100644
index 907dedfa9db38a9608917c5021f43eaba0e49199..0000000000000000000000000000000000000000
Binary files a/jay_imagenet_for_co3d_val_0.1_images/ILSVRC2012_val_00042007.JPEG and /dev/null differ

9LbM6H6z+?j zEfdWDBt&^0utc1W_Dx##eZS&6RTw#Ljt@dUh{=9cv^J&khp>{g2g;dpM_ z<~rz{+W1v_4j+Y}0@KYSU`8EIw)vj@mnrzO$|_9Bwl~GYL1!s;xqeuzGVPusVKag?ijJX)Nv2P*pAHb_P_c?>u82GwDev6f}`V(T@ z%qkWn)eCNkW=avG-?7ecnqT}d!OnaY$e;PRqPjPO zK_hQMyD2Z<#!oT)>$Ia8geHyO0C6koJlAR~p#j3@bmX!iba^pQIlkK`e&?;@MsgKk zi@2Xc)d|YVSn-C_xMCh}Ja!o}D?Q^k>_0_!5-p8;etCO$jp^8Bwm?Wryig4uJ=Rr| zArIyz>CZ#u6_JSUbl@##B`UA5{gYRG1pT5y#oU6d3LqI5kW5Of0L;B8X<7j002IVf zqh7%8@IGS}6zS@cT<}C1`z~kJP!*45eFNC)nGo%-}LURP{6;fovG*8qrDia_x83 zm}@p1ysQBegfM$n%zwer=zGi8U>zS)$weW`>S{qDB9!WOMZ!DSr%^Iq9+Q$ffzpWl zzenc(!l+EEXKDMAj6vFOOKV}vo2fZ8Dp)u6o46M45EVK;?&vf?r(k!{71tKOjS z*a8EqZq(YoooeYA5||Gc$+F?OBvSM#_`o32qri7KkLiIK@3-&7H#PatBd$!PS01wc z+weGVlj`YZQN#0M;-2l?w+YYkR7?Gu%LNC^7ha-uIYQ2nfJ5q`Z(Q!Zve``v&8YM< zagw#X=fu0U_(?gY+uPWk)(-*8pBS^D7*8@?D!P3wyfg z+f(D7n+H|1z3rV(CyYs~sCWbwt;$i^2+v7vuqm511DAE7D#6Q9xsCl{I|0ruUyGoE ze6tfIK&#v!{aZ)c&z%?QL38}Ab6um`=vzsFc9KpeEIqd=IV6ik_p%&h>i`^#M(tA# z#}lU_@f)trPqnc5v7v$--QwAteSMxYH7Z_X6?mB+Un8FwNQfuy*vDf8ztzWx^Nq$ zdB@4`Tsicp*m!MLs5Ic^-E4{SVr5Am`R6x7xZV9WEp@ui7)A1W*e_Wk=p*u{ikMCD ze#22QO24U8jcHAk(R7Z270T|^$Ez0NS-LPJ^J59GAY&anHF?Xt~i`1}BCSGu}kACn=#xkjM||4;UH&xnFs zx*+a)E!EYSLC1Tb_IrCnVu42e)0l0~+ASG7*BBc4o+plr=O1s;5adqj9iulSC*}BO zwnaJ;by}U7Y2}c*UqQG=-$}kuvEQ2wA^|nAMp&jWe)Aq}mPI7s;(7JKtqq)G(;o$_ zXfpn#;;J6{OG#D+mq$He2Vjm`i_36~CsjY3I~!jytW_H*cUWb~YFTVHP&bLvamK&* z$6t_rtNDqvqCw(1I>oqDjE6RtYm3@|)Anvg z%53lPmfF(l-(_x=8NR{xsoF47*z=tjwN}@*BIv@gGmWt>Hcl1YEYywj(~%ih6uw_> zk}U)5zf%Fr7Q!dH)k6;ew!1!f?g#-*lZT!B&HURAvqbzq~f+}X| z{}i*$2(bQ2Ew#x*i72NYFh}_VRD=cmJA`(EXf~6|ahYp=;42{O1K46Ht!AsQ7^FAK z#1B^Tpl_)eHfo)^MB6g%NV^^mAvcST7(}on0pR}g+^*<&Ya9wokQ}N}vi9aR zC*^U!d*^j<-N=~Hkbv(po`O7)T4Xm4YRn=IuGtKyH9B3SiGjY~zDP87OSRT4ZZ|6s zf^1`2sJIgoaMXQ&iPk=*(pT#nlrdWR>AR?cH?70J8MUcvVY5QFm0b>{EdOoEU9QCC z(zX6vvQFQgm92^3<2Cv#OlYb8m-h*Lo3cL*Pqk=AMcq{-=k6E8c)3{~*}p3L$s-fY zNjYqu->-UL*A#^gDR04m;oi%5*nNF^LVHIzEsg28rmJz-Og{I1l}c@IHvJ}Puf~$E zFipThEo`tXK#eN7M?ZsM^))JjKdN0Ep!OW3b=NLwZtncr+K?S|;K8wQ7`759BD`Ia zuPqozHE|g&7eQd#Fx(8@q0lkKCB|?)Qd53@Shn>JO%~G;@VSC919IJF&r-@M?sz7|~PhN3jk|j?*`MlWFn_t8rqR zt!}p!pK_?g_4fB5AEQ%?+A`-KRGpHw+-iyI<G3 zpMkIL%FZ$f7LzddzSy-8nlKU1n>eb-^}fS7Af``JZBC13pm(mdjWDLzr41?Z#{ zuaSZs!Fg}nR{8N(tvH)2WS+0G7(7PMCVCypq+2e6|V86}Q0h`R!po5LcQ_@vMhc)=HwMQzR>4 z(l^-exWZGq1M#Sx0QtEW;0<{sAl??(Q)%n>A?*e0C;!>khxQ!ZC`YuTF6C%Zt`tZY zuv4I2%d9j!Fz}ehH&`(km}>Yz=;nA0d1Wo0-)vfLgQ_-&V)VCUOC5Fu1*j!r7JTc0FmoTMI;g9KG}? zb;a*!IItrXqE%zu88t#0*n2)E?{?Ja2TGV+b3NqWH>E!hi(4wp*zUba2BWS9^>8 z*POBWpOeap!c*qLQx<3iN5Qw`IdWR+I=XJ-OMNirlz$d?VBDo04Lr9oO0nwI`t;7T zR9qp?tNCDP)-?Fs=Fl)TI~F2b{w?&f;GMd}TVr&x)VSaS49ZdIYyA7e%(R;8Ni?7|eT9O$GL$CV_i5oTmP_o^I{i6^9`Fpu59 z>Y`n)G*|p-+98IFSjKkI2nxs=E@eq^n8*Q_V+mV%x+=07T77N}=6_PYxUx$-SCPo9 zMK7_rH@3G^W6H70RqgS@YdNH=%C|ZbZ?m{?x=>GrRDp^PJ)C7#tNN${mv8|n)Uj0F>bo|scgRLl!pIX zdQ}vvPUaJriyNy)5zX1!HtXC&%4?~iJyDzSA&`Bpg0j6IT zNIDIC(fKrF9>cEC|O0H-dt|4Iq4vb>D;tj`MZmUvRo=V%<%p? 
z)FV1twSKAp>I?}WtY&!*M0Yh1z^(l~kl3*>MZtwZYZ+~EdO=~VWc`(R?nJ@cg-)M^ z#U*h);@xUnSR5RcL(rv|YnAI?`TKJJJg*}=hI&}~0HtR;xNADx`;F%|r1Unoy@_Q5 zMaK0nKN0*Ei6A6%;dh^e zf?m$)uRb;<@b$)<$o=5QV7CxqCT2lY(;;w6y_WXG)%cM4ozBd8-N|yV(sX|_C>hmX z@|^H-|B=f)>GaHVlc~&r;T!EF*^j-xIPP+iF%o^RgUL-xHtQQ^Uxzo>q_4STSiv<& zfTzONJr~ENJtv!P(&qETD706*%tbAs;tBcC*sJVVsE9C)t_(|k!}H#{!6%Fgw{s2O z!s0tt5pRZbZ*?_iemP;*RULS7u3^7=Cw9*r@u^2_EZ?7d-}pt#-7`^5Z)*$s4-q@< z$)!D0z7doXBWG*xMwx9Vx;8c<$SudF>Yxm%4i%H&faaS;#D^tk_oQh!>88C>%T8XO zqbFHjE>)qmj`4cvj8EPVWpk(NHjk$~N@+=$+bP~GGyXab@d9}n<@7y8Yc#d`Jyjce z`ZE2e3n#9%i@S*&x78j}44t9JOZ%JbxS|6Pb8R?iJ=LaU&Z9xo0_&I&BUJqB-7R>x zgL?PwsX99yi313)G^|G>TubC8`@C0c=gOU#x-SZ~IKttt$WO}_o0T#;e^)@8WA6Lz zLH)^YNPzp9aPly8va@T{O7I|kGZ47GNfOTUQIr9Qh?s3h^(sO^TkpOg0ZntXD{%IuoZ=HDr+%BQ7}X?(4hd%U%6lY zB#7_CCS?G2di1FkvgTDvqWm(BvfWA54!$iZGc^__7^Bvqa(>J0(JsP?tTM_^;@Tdn zuniML=wJxX>aJ`N+k=Bs5_IIupQ0xl4nguV&7kU!#TNKn*FJe1Y?i%dTH>6R_tIgg zQ}mV_xHq2uD)2j>SlMFGdR%cf%d8zctC&*9ZsAs+f?W}}&tDx&D{19c8xBs@E6^4? za{r#o_DwHzQsAn@^j7<(o@lAS9&KMKWluMwo^N%Ly4jI^fFg&jW^Cq*!uA((UfgQR z*|%<@K+9~1ryMs zU5u)j>rB;Vs5b?J&YuPyUt<~kRh*@sW9{e=vh z=SF-Mg(d@uW$X-Kal0^Uu!$9=aPOeCFOE1>WQF^?=u(YQGYMYKV=&kr<|ZiyBuj`K zYhsD-h-9v*sV+6VPZzc!guNs!<4L}%mUY7q)ASrPI)sAWPHNZE z(Pi#SS>=`Js|r`S6t+Ok?jX$!u@+1A^R7~KX&d5UKY}QRN=osNfHVgZCpRu1Yq0q+ zJS^>sJdOC;(?l>(FWgZ1%`|vhscxAjmc=NBXK$EsulK=FV20-YliI>y9kHAY_!Cs$ zyIzOWZ1bCxQfkw=90OPOdVGFO8~0UH>olngP%0wc^9-)c z34NGaQKPdOR5mru*N{hU{2F7ZIF+_a!-Cu_Z0H`tebmFxDE7nrQ`!e|j*S34#?%5U zhun4_mP5nQ1}~i#sELRkU_u(83gq7kWY9N|e8ne=L3-*)scFThr<3LxbdAWO&l?4q4 z={5xW?_Q0mj^JJ50E^vai9IDQ#3VeXK2w0i97P$Am+|Ex#o0d=T(gTZ-2{xpvcOFXKvORMF73?_F=@Gl(gc zha9iAeNNKB4gEf9NP)Y@8eyLH6Lt;LnBHUvw-7P%8b3{D82j$hcE-D-^$7EKEGx!{ zgws1$m^FFd`=8#*(EA;sTs?*zv!jmq($YB|VH%Dt@Q&ya8_sUj4{m5&;m;c#oFY9M z`Nj`%&q?b*-0>aHATSL$3;q&J_3bMC;3D1ZLlzq=Nmb(v_(5N*O~hw1Y4nK2b{y*= zV2yntfr4yK$Io4JuJ0Gg5=%--8RO&5T=(z5AGSOxW(Otvj1f7rPqM@)idIoKgKId0DOx>ezO3jSzBLSa~CnvO!SxbdBqm83Mj;tz&1|DM8(` zX6`&m{D(OPgC6z9t3>uRb}3C^1^b9%{|v7H2}PZ0iRm+q zgGX4VE7~nA`a%_M&jY^Uf6Z6lJLi@DIqc@D$z3?d)Md9zqJKek-J<_}Dl75oX((z~ z=grCWeHd@&h5FpK8SE*lIY|Oq5Lia_CV8E7qMt@(ldYmqD)s-4xuEd#KN$Ky5yo!E z1F9@0;`>4EZuFqYDQ|Vm_d0I*N45NK)TX>>2 zLFjNoP?0Cxu5mWDcni$yrGmdzY3?F%A!*lCxyc^)fF%Mi<&tC!h`)>7?85f54sbz* z@GTa07JMvo9BZFL!SWt|m;mG%A$N!nR@_x?Ij4fY;ZpI+?+0S&I7+;GMZlen7Me_S z81@^co&7@lF$ZDYSCX530&Ip@>zO=TZE9feR>mlS+3dtEf8T3Q{}(9_L}?;u==!|& zO`DZI^06c{(#66n6J|GJOx{FOsu2HjU70ny6fV`tSuEsbN(er{y{P&5^3?ye%@;!! 
[truncated diff hunk] - if len(binary_maps) >= min_subjects: - if average_maps: - bin_clickmaps.append(np.array(binary_maps).mean(0, keepdims=True)) - else: - bin_clickmaps.append(np.array(binary_maps)) + if average_maps: + bin_clickmaps.append(np.array(binary_maps).mean(0, keepdims=True)) + else: + bin_clickmaps.append(np.array(binary_maps)) # else: # print("Not enough subjects", key, len(binary_maps)) @@ -1778,11 +1770,10 @@ def process_all_maps_multi_thresh_gpu( mask = binary_maps.sum((-2, -1)) >= min_clicks binary_maps = binary_maps[mask] # If we have enough valid maps, average them and keep this image - if len(binary_maps) >= min_subjects: - if average_maps: - bin_clickmaps.append(np.array(binary_maps).mean(0, keepdims=True)) - else: - bin_clickmaps.append(np.array(binary_maps)) + if average_maps: + bin_clickmaps.append(np.array(binary_maps).mean(0, keepdims=True)) + else: + bin_clickmaps.append(np.array(binary_maps)) # else: # print("Not enough subjects", key, len(binary_maps)) @@ -1810,21 +1801,21 @@ def process_all_maps_multi_thresh_gpu( clickmap_bins[key] = np.asarray(bin_counts) # Add to all_clickmaps with the appropriate method if save_to_disk: - temp_group.create_dataset(f"clickmap_{str(clickmap_idx).zfill(8)}", data=np.stack(bin_clickmaps, axis=0)) + temp_group.create_dataset(key, data=np.stack(bin_clickmaps, axis=0)) elif return_before_blur: bin_clickmaps = np.stack(bin_clickmaps, axis=0) if max_subjects > 0: max_subjects = min(max_subjects, bin_clickmaps.shape[1]) bin_clickmaps
= bin_clickmaps[:, :max_subjects, :, :] - all_clickmaps.append(np.stack(bin_clickmaps, axis=0)) + all_clickmaps[key] = (np.stack(bin_clickmaps, axis=0)) else: - all_clickmaps.append(np.concatenate(bin_clickmaps, axis=0)) + all_clickmaps[key] = (np.concatenate(bin_clickmaps, axis=0)) if save_to_disk: temp_file.close() if not save_to_disk and not all_clickmaps: print("No valid clickmaps to process") - return {}, [], [], [], {}, [] + return {}, {}, [], [], {}, {} if return_before_blur: return final_clickmaps, all_clickmaps, categories, keep_index, click_counts, clickmap_bins @@ -1834,7 +1825,7 @@ def process_all_maps_multi_thresh_gpu( print(f"Preparing to blur {total_maps} image clickmaps using GPU with adaptive kernel sizing...") # Convert all maps to tensors - all_tensors = [torch.from_numpy(maps).float() for maps in all_clickmaps] + # all_tensors = [torch.from_numpy(maps).float() for maps in all_clickmaps] # Group images by their required kernel size to batch efficiently kernel_groups = {} @@ -1857,12 +1848,12 @@ def process_all_maps_multi_thresh_gpu( kernel_key = (adj_blur_size, adj_blur_sigma) if kernel_key not in kernel_groups: kernel_groups[kernel_key] = [] - kernel_groups[kernel_key].append(idx) + kernel_groups[kernel_key].append(key) print(f"Processing {len(kernel_groups)} different kernel sizes...") # Process each kernel group separately - for (kernel_size, kernel_sigma), image_indices in tqdm(kernel_groups.items(), desc="Processing kernel groups"): - print(f"Processing {len(image_indices)} images with kernel size {kernel_size}, sigma {kernel_sigma}") + for (kernel_size, kernel_sigma), image_keys in tqdm(kernel_groups.items(), desc="Processing kernel groups"): + print(f"Processing {len(image_keys)} images with kernel size {kernel_size}, sigma {kernel_sigma}") # print(f"Processing {len(image_indices)} images with kernel size {kernel_size}, sigma {kernel_sigma}") # Create kernel for this group @@ -1870,26 +1861,29 @@ def process_all_maps_multi_thresh_gpu( kernel = circle_kernel(kernel_size, kernel_sigma, 'cuda') # Process images in this group in batches - group_batch_size = min(gpu_batch_size, len(image_indices)) - num_batches = (len(image_indices) + group_batch_size - 1) // group_batch_size + group_batch_size = min(gpu_batch_size, len(image_keys)) + num_batches = (len(image_keys) + group_batch_size - 1) // group_batch_size for batch_idx in range(num_batches): # Get batch indices for this kernel group batch_start = batch_idx * group_batch_size - batch_end = min(batch_start + group_batch_size, len(image_indices)) - batch_image_indices = image_indices[batch_start:batch_end] + batch_end = min(batch_start + group_batch_size, len(image_keys)) + batch_image_keys = image_keys[batch_start:batch_end] # Get tensors for this batch - batch_tensors = [all_tensors[idx] for idx in batch_image_indices] - + # batch_tensors = [all_tensors[idx] for idx in batch_image_indices] + batch_tensors = {} + for key in batch_image_keys: + single_tensor = torch.from_numpy(all_clickmaps[key]).float() + batch_tensors[key] = single_tensor # Group batch tensors by shape to handle different dimensions within the same kernel group shape_groups = {} - for i, tensor in enumerate(batch_tensors): + for i, (key, tensor) in enumerate(batch_tensors.items()): shape_key = tuple(tensor.shape) if shape_key not in shape_groups: shape_groups[shape_key] = [] - shape_groups[shape_key].append((i, tensor, batch_image_indices[i])) + shape_groups[shape_key].append((i, tensor, key)) # Process each shape group separately for shape, 
tensor_data in shape_groups.items(): - indices, tensors, img_indices = zip(*tensor_data) + indices, tensors, img_keys = zip(*tensor_data) # Calculate memory-safe batch size for this shape group # Estimate memory usage: shape[0] * shape[1] * shape[2] * 4 bytes per float32 memory_per_tensor = shape[0] * shape[1] * shape[2] * 4 # bytes @@ -1908,7 +1902,7 @@ def process_all_maps_multi_thresh_gpu( end_idx = min(start_idx + safe_batch_size, len(tensors)) batch_tensors_subset = tensors[start_idx:end_idx] - batch_img_indices_subset = img_indices[start_idx:end_idx] + batch_img_keys_subset = img_keys[start_idx:end_idx] try: # Try to concatenate tensors of the same shape shape_batch_tensor = torch.cat(batch_tensors_subset, dim=0).unsqueeze(1).to('cuda') @@ -1917,8 +1911,9 @@ def process_all_maps_multi_thresh_gpu( blurred_tensor = convolve(shape_batch_tensor, kernel, double_conv=True) # Convert back to numpy and update results blurred_maps = blurred_tensor.squeeze(1).cpu().numpy() - for i, img_idx in enumerate(batch_img_indices_subset): - all_clickmaps[img_idx] = blurred_maps[i*thresholds:(i+1)*thresholds] # Keep the same shape with extra dimension + for i, img_key in enumerate(batch_img_keys_subset): + + all_clickmaps[img_key] = blurred_maps[i*thresholds:(i+1)*thresholds] # Keep the same shape with extra dimension # Clean up GPU memory for this shape batch del shape_batch_tensor, blurred_tensor torch.cuda.empty_cache() @@ -1926,10 +1921,10 @@ def process_all_maps_multi_thresh_gpu( except Exception as e: # If concatenation still fails, process individually print(f"Shape batch processing failed for shape {shape} (batch {shape_batch_idx+1}/{num_shape_batches}), processing {len(batch_tensors_subset)} images individually: {e}") - for i, (tensor, img_idx) in enumerate(zip(batch_tensors_subset, batch_img_indices_subset)): + for i, (tensor, img_key) in enumerate(zip(batch_tensors_subset, batch_img_keys_subset)): gpu_tensor = tensor.unsqueeze(1).to('cuda') blurred_tensor = convolve(gpu_tensor, kernel, double_conv=True) - all_clickmaps[img_idx] = blurred_tensor.squeeze(1).cpu().numpy() + all_clickmaps[img_key] = blurred_tensor.squeeze(1).cpu().numpy() # Clean up GPU memory del gpu_tensor, blurred_tensor diff --git a/tools/find_top_bottom.py b/tools/find_top_bottom.py index 39b65bf..69647ab 100644 --- a/tools/find_top_bottom.py +++ b/tools/find_top_bottom.py @@ -24,22 +24,17 @@ def get_num_subjects(): return total_numbers def plot_clickmap(img, hmp, score, num_subjects, img_name, image_output_dir): - img = np.asarray(img) - if img.shape[0] != hmp.shape[0]: - print(img_name, img.shape, hmp.shape) - # f = plt.figure() - # plt.subplot(1, 2, 1) - # plt.imshow(np.asarray(img)) - # print(img_name, np.asarray(img).shape, hmp.shape) - - # title = f"{img_name}\nSpearman: {score}\nNum Subjects: {num_subjects}" - # plt.title(title) - # plt.axis("off") - # plt.subplot(1, 2, 2) - # plt.imshow(hmp) - # plt.axis("off") - # plt.savefig(os.path.join(image_output_dir, img_name.replace('/', '_'))) - # plt.close() + f = plt.figure() + plt.subplot(1, 2, 1) + plt.imshow(np.asarray(img)) + title = f"{img_name}\nSpearman: {score}\nNum Subjects: {num_subjects}" + plt.title(title) + plt.axis("off") + plt.subplot(1, 2, 2) + plt.imshow(hmp) + plt.axis("off") + plt.savefig(os.path.join(image_output_dir, img_name.replace('/', '_'))) + plt.close() return if __name__ == "__main__": @@ -49,33 +44,31 @@ def plot_clickmap(img, hmp, score, num_subjects, img_name, image_output_dir): os.makedirs(image_output_dir, exist_ok=True) with 
open(scores_json, 'r') as f: scores_dict = json.load(f)['all_img_ceilings'] - - val_map_files = ['assets/jay_imagenet_val_08_27_2025_batch001.h5', 'assets/jay_imagenet_val_08_27_2025_batch002.h5', - 'assets/jay_imagenet_val_08_27_2025_batch003.h5', 'assets/jay_imagenet_val_08_27_2025_batch004.h5'] - num_subjects_dict = get_num_subjects() - top10 = dict(sorted(scores_dict.items(), key=lambda x: x[1], reverse=True)[:1000]) - bot10 = dict(sorted(scores_dict.items(), key=lambda x: x[1], reverse=False)[:1000]) - top10_maps = [] - bot10_maps = [] + metadata = np.load("image_metadata/jay_imagenet_val_04_30_2025_dimensions.npy", allow_pickle=True).item() + val_map_files = ['assets/imgnet_val/jay_imagenet_val_08_27_2025_batch001.h5', 'assets/imgnet_val/jay_imagenet_val_08_27_2025_batch002.h5', + 'assets/imgnet_val/jay_imagenet_val_08_27_2025_batch003.h5', 'assets/imgnet_val/jay_imagenet_val_08_27_2025_batch004.h5'] + top10 = dict(sorted(scores_dict.items(), key=lambda x: x[1], reverse=True)[:10]) + bot10 = dict(sorted(scores_dict.items(), key=lambda x: x[1], reverse=False)[:10]) + top10_maps = {} + bot10_maps = {} for map_file in val_map_files: map_content = h5py.File(map_file, 'r')['clickmaps'] for img_name in top10: img_name = img_name.replace('/', '_') if img_name in map_content: - top10_maps.append(map_content[img_name]['clickmap'][:].mean(0)) + top10_maps[img_name] = map_content[img_name]['clickmap'][:].mean(0) for bot_img_name in bot10: bot_img_name = bot_img_name.replace('/', '_') - if bot_img_name in map_content: - bot10_maps.append(map_content[bot_img_name]['clickmap'][:].mean(0)) - if len(top10_maps) == 10 and len(bot10_maps) == 10: - break + if bot_img_name in map_content: + bot10_maps[bot_img_name] = map_content[bot_img_name]['clickmap'][:].mean(0) + top10_paths = [] bot10_paths = [] for img_name in top10: img_name = img_name.split('/')[1] img_path = os.path.join(data_root, f'{img_name}') top10_paths.append(img_path) + for img_name in bot10: img_name = img_name.split('/')[1] img_path = os.path.join(data_root, f'{img_name}') @@ -83,17 +76,29 @@ def plot_clickmap(img, hmp, score, num_subjects, img_name, image_output_dir): for i, img_name in enumerate(top10): score = scores_dict[img_name] + metadata_shape = metadata[img_name] + img_name = img_name.split('/')[1] - hmp = top10_maps[i] + hmp = top10_maps[img_name] img = Image.open(top10_paths[i]) - num_subjects = num_subjects_dict[img_name] + # num_subjects = num_subjects_dict[img_name] + num_subjects = 1 + img = np.asarray(img) + if img.shape[0] != hmp.shape[0]: + print(img_name, img.shape, hmp.shape, metadata_shape) plot_clickmap(img, hmp, score, num_subjects, f"top_{img_name}", image_output_dir) for i, img_name in enumerate(bot10): score = scores_dict[img_name] + metadata_shape = metadata[img_name] + img_name = img_name.split('/')[1] - hmp = bot10_maps[i] + hmp = bot10_maps[img_name] img = Image.open(bot10_paths[i]) - num_subjects = num_subjects_dict[img_name] + num_subjects = 1 + # num_subjects = num_subjects_dict[img_name] + img = np.asarray(img) + if img.shape[0] != hmp.shape[0]: + print(img_name, img.shape, hmp.shape, metadata_shape) plot_clickmap(img, hmp, score, num_subjects, f"bottom_{img_name}", image_output_dir) \ No newline at end of file
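The find_top_bottom.py changes above rank images by the per-image ceilings stored under 'all_img_ceilings' in the results JSON and then plot the image/heatmap pairs at both extremes. The snippet below sketches just the ranking step, assuming that JSON layout; the helper name and the default k are illustrative and not part of the tool.

import json

def rank_by_ceiling(scores_json_path, k=10):
    # Return the k highest- and k lowest-ceiling image names with their scores.
    with open(scores_json_path, "r") as f:
        scores = json.load(f)["all_img_ceilings"]  # {image name: split-half ceiling}
    ranked = sorted(scores.items(), key=lambda kv: kv[1], reverse=True)
    top_k = dict(ranked[:k])      # images where subjects agree most
    bottom_k = dict(ranked[-k:])  # images where subjects agree least
    return top_k, bottom_k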
diff --git a/tools/sample_clickmaps.py b/tools/sample_clickmaps.py index e064bf9..3f0dd44 100644 --- a/tools/sample_clickmaps.py +++ b/tools/sample_clickmaps.py @@ -8,6 +8,8 @@ clickme_data = process_clickme_data(clickme_data_file, True) total_maps = len(clickme_data) total_numbers = {} + map_file = 'assets/jay_imagenet_val_08_27_2025_batch001.h5' + #target_img_names = ["ILSVRC2012_val_00008676.JPEG", "ILSVRC2012_val_00009263.JPEG", "ILSVRC2012_val_00009305.JPEG", "ILSVRC2012_val_00013865.JPEG" # "ILSVRC2012_val_00021166.JPEG", "ILSVRC2012_val_00023616.JPEG", "ILSVRC2012_val_00024753.JPEG", "ILSVRC2012_val_00027647.JPEG", # "ILSVRC2012_val_00034111.JPEG", "ILSVRC2012_val_00038455.JPEG"] diff --git a/tools/validate_size.py b/tools/validate_size.py new file mode 100644 index 0000000..e8ff0ad --- /dev/null +++ b/tools/validate_size.py @@ -0,0 +1,26 @@ +import os +import numpy as np +import h5py +from PIL import Image +from tqdm import tqdm + +if __name__ == "__main__": + hdf_path = "assets/imgnet" + metadata = np.load("image_metadata/jay_imagenet_train_04_30_2025_dimensions.npy", allow_pickle=True).item() + data_root = "/gpfs/data/shared/imagenet/ILSVRC2012/train" + for i, hdf_file in tqdm(enumerate(os.listdir(hdf_path))): + if not hdf_file.endswith('.h5'): + continue + map_content = h5py.File(os.path.join(hdf_path, hdf_file), 'r')['clickmaps'] + for img_name in map_content: + metadata_img_name = img_name.replace('_', '/', 1) + img_cls = metadata_img_name.split('/')[0] + folder_img_name = metadata_img_name.split('/')[1] + if not os.path.exists(os.path.join(data_root, img_cls, folder_img_name)) or metadata_img_name not in metadata: + continue + img = Image.open(os.path.join(data_root, img_cls, folder_img_name)) + img = np.asarray(img) + metadata_shape = metadata[metadata_img_name][::-1] + hmp_shape = map_content[img_name]['clickmap'][:].mean(0).shape + if metadata_shape != hmp_shape or img.shape[:2] != hmp_shape: + print(i, img_name, img.shape[:2], metadata_shape, hmp_shape) \ No newline at end of file From a340dc291aa704e51f9ea9437dbe4651803c47fb Mon Sep 17 00:00:00 2001 From: PPPayson Date: Tue, 4 Nov 2025 10:34:52 -0500 Subject: [PATCH 16/16] Remove unnecessary gc calls --- ceiling_floor_estimate.py | 47 +- ceiling_floor_estimate_large.py | 47 +- .../imgnet_configs/imagenet_train_oscar.yaml | 14 +- .../imgnet_configs/imagenet_val_oscar.yaml | 16 +- .../imagenet_val_spearman_oscar.yaml | 16 +- prepare_clickmaps.py | 134 ++-- ...pute_floor.sh => compute_ceiling_floor.sh} | 2 +- src/utils.py | 736 +----------------- tools/find_top_bottom.py | 13 +- tools/verify_list.py | 13 + 10 files changed, 180 insertions(+), 858 deletions(-) rename scripts/{compute_floor.sh => compute_ceiling_floor.sh} (89%) create mode 100644 tools/verify_list.py diff --git a/ceiling_floor_estimate.py b/ceiling_floor_estimate.py index eaaf3f6..48a9197 100644 --- a/ceiling_floor_estimate.py +++ b/ceiling_floor_estimate.py @@ -19,11 +19,42 @@ from torchvision.transforms import functional as tvF from torchvision.transforms import InterpolationMode -def emd_2d(test_map, ref_map): - test_map = (test_map - test_map.min()) / (test_map.max() - test_map.min()+1e-8) - ref_map = (ref_map - ref_map.min()) / (ref_map.max() - ref_map.min()+1e-8) - return wasserstein_distance_nd(test_map, ref_map) - +# def emd_2d(test_map, ref_map): +# test_map = (test_map - test_map.min()) / (test_map.max() - test_map.min()+1e-8) +# ref_map = (ref_map - ref_map.min()) / (ref_map.max() - ref_map.min()+1e-8) +# return wasserstein_distance_nd(test_map, ref_map) + +def emd_2d(s_map, c_map, eps=0.2, iters=50): + """ + Sinkhorn EMD between standardized positive maps. Returns [B].
+ """ + if s_map.ndim == 4: s_map = s_map.squeeze(1) + if c_map.ndim == 4: c_map = c_map.squeeze(1) + s = zplus(s_map).squeeze(1).clamp_min(0) + c = zplus(c_map).squeeze(1).clamp_min(0) + + B, H, W = s.shape + s = s.view(B, -1); c = c.view(B, -1) + s = s / (s.sum(dim=1, keepdim=True).clamp_min(1e-8)) + c = c / (c.sum(dim=1, keepdim=True).clamp_min(1e-8)) + + M = _grid_cost(H, W, s.device).unsqueeze(0).expand(B, -1, -1) + return _sinkhorn(s, c, M, eps=eps, iters=iters) + +def _sinkhorn(a, b, M, eps, iters): + """ + a,b: [B,P] prob vectors; M: [B,P,P] ground cost. + Returns: [B] transport cost. + """ + K = torch.exp(-M / eps) + u = torch.full_like(a, 1.0 / a.size(1)) + v = torch.full_like(b, 1.0 / b.size(1)) + for _ in range(iters): + u = a / (K @ v).clamp_min(1e-8) + v = b / (K.transpose(1,2) @ u).clamp_min(1e-8) + P = torch.diag_embed(u) @ K @ torch.diag_embed(v) + return (P * M).sum(dim=(1,2)) + def rank_cosine(test_map, ref_map): ref_map = ref_map.flatten() test_map = test_map.flatten() @@ -257,7 +288,7 @@ def compute_rotation_correlation_batch(batch_indices, all_data, all_names, metri else: rand_scores = np.nanmean(np.asarray(rand_scores)) level_scores.append(rand_scores) - gc.collect() + # gc.collect() angle_score = np.nanmean(np.asarray(level_scores)) batch_results[target_img_diff].append(angle_score) all_rotation_results[img_name][target_img_diff] = angle_score @@ -393,7 +424,7 @@ def compute_scale_correlation_batch(batch_indices, all_data, all_names, metric=" del blur_clickmaps rand_scores = np.nanmean(np.asarray(rand_scores)) level_scores.append(rand_scores) - gc.collect() + # gc.collect() scale_results[img_name][target_img_diff] = np.nanmean(np.asarray(level_scores)) batch_results[target_img_diff].append(np.nanmean(np.asarray(level_scores))) if not floor: @@ -536,7 +567,7 @@ def compute_correlation_batch(batch_indices, all_clickmaps, all_names, metric="a rand_corrs = np.nanmean(np.asarray(rand_corrs)) # Take the mean of the random correlations level_corrs.append(rand_corrs) # Free memory - gc.collect() + # gc.collect() batch_results.append(np.asarray(level_corrs).mean()) # Integrate over the levels all_scores[img_name] = batch_results[-1] return batch_results, all_scores diff --git a/ceiling_floor_estimate_large.py b/ceiling_floor_estimate_large.py index d4c1041..37bd5e0 100644 --- a/ceiling_floor_estimate_large.py +++ b/ceiling_floor_estimate_large.py @@ -150,9 +150,9 @@ def compute_correlation_batch(batch_indices, all_clickmaps, all_names, metric="a all_scores = {} max_kernel_size = config.get("max_kernel_size", 51) blur_sigma_function = config.get("blur_sigma_function", lambda x: x) - for i in tqdm(batch_indices, desc="Computing split-half correlations", total=len(batch_indices)): + for i in tqdm(batch_indices, desc="Computing split-half correlations", total=len(batch_indices), mininterval=10): img_name = all_names[i] - clickmaps = all_clickmaps[img_name] + clickmaps = all_clickmaps[img_name.replace('/', '_')] level_corrs = [] if metadata and img_name in metadata: native_size = metadata[img_name] @@ -173,7 +173,7 @@ def compute_correlation_batch(batch_indices, all_clickmaps, all_names, metric="a if rand_i >= i: rand_i += 1 rand_name = all_names[rand_i] - random_map = all_clickmaps[rand_name] + random_map = all_clickmaps[rand_name.replace('/', '_')] if metadata and rand_name in metadata: native_size = metadata[rand_name] short_side = min(native_size) @@ -268,7 +268,7 @@ def compute_correlation_batch(batch_indices, all_clickmaps, all_names, metric="a rand_corrs = 
np.nanmean(np.asarray(rand_corrs)) # Take the mean of the random correlations level_corrs.append(rand_corrs) # Free memory - gc.collect() + # gc.collect() batch_results.append(np.asarray(level_corrs).mean()) # Integrate over the levels all_scores[img_name] = batch_results[-1] return batch_results, all_scores @@ -390,10 +390,10 @@ def compute_correlation_batch(batch_indices, all_clickmaps, all_names, metric="a os.makedirs(output_dir, exist_ok=True) os.makedirs(image_output_dir, exist_ok=True) - os.makedirs(os.path.join(output_dir, config["experiment_name"]), exist_ok=True) + # os.makedirs(os.path.join(output_dir, config["experiment_name"]), exist_ok=True) # Create dedicated directory for click counts click_counts_dir = os.path.join(output_dir, f"{config['experiment_name']}_click_counts") - os.makedirs(click_counts_dir, exist_ok=True) + # os.makedirs(click_counts_dir, exist_ok=True) # Original code for non-HDF5 format hdf5_path = os.path.join(output_dir, f"{config['experiment_name']}.h5") @@ -535,18 +535,13 @@ def compute_correlation_batch(batch_indices, all_clickmaps, all_names, metric="a print(f"Computing split-half correlations in parallel (n_jobs={n_jobs}, batch_size={correlation_batch_size})...") temp_file = h5py.File(temp_dir, 'r') temp_group = temp_file['clickmaps'] - all_clickmaps=temp_group + all_clickmaps = temp_group num_clickmaps = len(temp_group) + print(f"Num clickmaps {len(temp_group)}") # Prepare batches for correlation computation indices = list(range(num_clickmaps)) batches = [indices[i:i+correlation_batch_size] for i in range(0, len(indices), correlation_batch_size)] - # # Reduce the number of jobs if there are many batches to prevent too many files open - # adjusted_n_jobs = min(n_jobs, max(1, 20 // len(batches) + 1)) - # if adjusted_n_jobs < n_jobs: - # print(f"Reducing parallel jobs from {n_jobs} to {adjusted_n_jobs} to prevent 'too many files open' error") - # n_jobs = adjusted_n_jobs - # Process correlation batches in parallel ceiling_returns = Parallel(n_jobs=n_jobs, prefer="threads")( delayed(compute_correlation_batch)( @@ -566,6 +561,23 @@ def compute_correlation_batch(batch_indices, all_clickmaps, all_names, metric="a ceiling_results, all_ceilings = zip(*ceiling_returns) # Force garbage collection between major operations gc.collect() + all_img_ceilings = {} + for img_ceilings in all_ceilings: + for img_name, score in img_ceilings.items(): + all_img_ceilings[img_name] = score + all_ceilings = np.concatenate(ceiling_results) + mean_ceiling = np.nanmean(all_ceilings) + if config['save_json']: + # Save as json + with open(os.path.join(output_dir, f"{config['experiment_name']}_{config['metric']}_ceiling_results.json"), 'w') as f: + output_json = {"all_imgs": final_keep_index, 'mean_ceiling':mean_ceiling, + 'all_ceilings':all_ceilings, 'all_img_ceilings':all_img_ceilings + } + for key, value in output_json.items(): + if isinstance(value, np.ndarray): + output_json[key] = value.tolist() + output_content = json.dumps(output_json, indent=4) + f.write(output_content) floor_returns = Parallel(n_jobs=n_jobs, prefer="threads")( delayed(compute_correlation_batch)( @@ -583,21 +595,16 @@ def compute_correlation_batch(batch_indices, all_clickmaps, all_names, metric="a ) for batch in tqdm(batches, desc="Computing floor batches", total=len(batches)) ) floor_results, all_floors = zip(*floor_returns) - all_img_ceilings = {} all_img_floors = {} - for img_ceilings in all_ceilings: - for img_name, score in img_ceilings.items(): - all_img_ceilings[img_name] = score + for img_ceilings in 
all_floors: for img_name, score in img_ceilings.items(): all_img_floors[img_name] = score # Flatten the results - all_ceilings = np.concatenate(ceiling_results) all_floors = np.concatenate(floor_results) # Compute the mean of the ceilings and floors - mean_ceiling = all_ceilings.mean() - mean_floor = all_floors.mean() + mean_floor = np.nanmean(all_floors) # Compute the ratio of the mean of the ceilings to the mean of the floors ratio = mean_ceiling / mean_floor diff --git a/configs/imgnet_configs/imagenet_train_oscar.yaml b/configs/imgnet_configs/imagenet_train_oscar.yaml index 7aa40a5..21fd032 100644 --- a/configs/imgnet_configs/imagenet_train_oscar.yaml +++ b/configs/imgnet_configs/imagenet_train_oscar.yaml @@ -1,15 +1,15 @@ -assets: assets/imgnet_train +assets: assets/imgnet_train_10_15 blur_size: 21 center_crop: - 224 - 224 class_filter_file: false -clickme_data: /cifs/data/tserre_lrs/projects/projects/prj_video_imagenet/human_clickme_data_processing/clickme_datasets/train_combined_08_27_2025.npz +clickme_data: /cifs/data/tserre_lrs/projects/projects/prj_video_imagenet/human_clickme_data_processing/clickme_datasets/train_combined_10_15_2025.npz correlation_batch_size: 4000 debug: false display_image_keys: auto -example_image_output_dir: jay_imagenet_train_combined_08_27_2025_images -experiment_name: jay_imagenet_train_08_27_2025 +example_image_output_dir: jay_imagenet_train_combined_10_15_2025_images +experiment_name: jay_imagenet_train_10_15_2025 file_exclusion_filter: ILSVRC2012_val file_inclusion_filter: null filter_mobile: true @@ -32,8 +32,8 @@ parallel_save: false participant_filter: false percentile_thresh: 50 preprocess_db_data: true -processed_clickme_file: jay_imagenet_train_combined_08_27_2025_processed.npz -processed_medians: jay_imagenet_train_combined_08_27_2025_medians.json +processed_clickme_file: jay_imagenet_train_combined_10_15_2025_processed.npz +processed_medians: jay_imagenet_train_combined_10_15_2025_medians.json remove_string: imagenet/train/ output_format: "hdf5" use_cython: true @@ -41,4 +41,4 @@ chunk_size: 100000 batch_size: 14000 time_based_bins: true multi_thresh_gpu: multi_thresh_gpu -processed_clickmap_bins: jay_imagenet_train_combined_08_27_2025_clickmap_bins.npy +processed_clickmap_bins: jay_imagenet_train_combined_10_15_2025_clickmap_bins.npy diff --git a/configs/imgnet_configs/imagenet_val_oscar.yaml b/configs/imgnet_configs/imagenet_val_oscar.yaml index 750f39e..6dd7b4e 100644 --- a/configs/imgnet_configs/imagenet_val_oscar.yaml +++ b/configs/imgnet_configs/imagenet_val_oscar.yaml @@ -1,16 +1,16 @@ -assets: assets/imgnet_val +assets: assets/imgnet_val_10_15 temp_dir: temp blur_size: 21 center_crop: - 224 - 224 class_filter_file: false -clickme_data: /cifs/data/tserre_lrs/projects/projects/prj_video_imagenet/human_clickme_data_processing/clickme_datasets/val_combined_08_27_2025.npz +clickme_data: /cifs/data/tserre_lrs/projects/projects/prj_video_imagenet/human_clickme_data_processing/clickme_datasets/val_combined_10_15_2025.npz correlation_batch_size: 1024 debug: false display_image_keys: auto -example_image_output_dir: jay_imagenet_val_combined_08_27_2025_images -experiment_name: jay_imagenet_val_08_27_2025 +example_image_output_dir: jay_imagenet_val_combined_10_15_2025_images +experiment_name: jay_imagenet_val_10_15_2025 file_exclusion_filter: null file_inclusion_filter: ILSVRC2012_val filter_mobile: true @@ -22,7 +22,7 @@ image_shape: mask_dir: null mask_threshold: 0 max_clicks: 1000000 -metadata_file: 
image_metadata/jay_filteimagenet_val_04_30_2025_dimensions.npy +metadata_file: image_metadata/jay_imagenet_val_04_30_2025_dimensions.npy metric: auc min_clicks: 1 min_subjects: 5 @@ -33,12 +33,12 @@ parallel_save: false participant_filter: false percentile_thresh: 50 preprocess_db_data: true -processed_clickme_file: jay_imagenet_val_combined_08_27_2025_processed.npz -processed_medians: jay_imagenet_val_combined_08_27_2025_medians.json +processed_clickme_file: jay_imagenet_val_combined_10_15_2025_processed.npz +processed_medians: jay_imagenet_val_combined_10_15_2025_medians.json remove_string: imagenet/val/ time_based_bins: true multi_thresh_gpu: multi_thresh_gpu output_format: "hdf5" -processed_clickmap_bins: jay_imagenet_val_combined_08_27_2025_clickmap_bins.npy +processed_clickmap_bins: jay_imagenet_val_combined_10_15_2025_clickmap_bins.npy chunk_size: 100000 batch_size: 14000 diff --git a/configs/imgnet_configs/imagenet_val_spearman_oscar.yaml b/configs/imgnet_configs/imagenet_val_spearman_oscar.yaml index 6471b2e..5756e28 100644 --- a/configs/imgnet_configs/imagenet_val_spearman_oscar.yaml +++ b/configs/imgnet_configs/imagenet_val_spearman_oscar.yaml @@ -1,16 +1,16 @@ -assets: assets/imgnet_val -temp_dir: temp/imgnet_val.h5 +assets: assets/imgnet_val_10_15 +temp_dir: temp/imgnet_val_10_15.h5 blur_size: 21 center_crop: - 224 - 224 class_filter_file: false -clickme_data: /cifs/data/tserre_lrs/projects/projects/prj_video_imagenet/human_clickme_data_processing/clickme_datasets/val_combined_08_27_2025.npz +clickme_data: /cifs/data/tserre_lrs/projects/projects/prj_video_imagenet/human_clickme_data_processing/clickme_datasets/val_combined_10_15_2025.npz correlation_batch_size: 4096 debug: false display_image_keys: auto -example_image_output_dir: jay_imagenet_val_combined_08_27_2025_images -experiment_name: jay_imagenet_val_08_27_2025 +example_image_output_dir: jay_imagenet_val_combined_10_15_2025_images +experiment_name: jay_imagenet_val_10_15_2025 file_exclusion_filter: null file_inclusion_filter: ILSVRC2012_val filter_mobile: true @@ -33,13 +33,13 @@ parallel_save: false participant_filter: false percentile_thresh: 50 preprocess_db_data: true -processed_clickme_file: jay_imagenet_val_combined_08_27_2025_processed.npz -processed_medians: jay_imagenet_val_combined_08_27_2025_medians.json +processed_clickme_file: jay_imagenet_val_combined_10_15_2025_processed.npz +processed_medians: jay_imagenet_val_combined_10_15_2025_medians.json remove_string: imagenet/val/ time_based_bins: true multi_thresh_gpu: multi_thresh_gpu output_format: "hdf5" -processed_clickmap_bins: jay_imagenet_val_combined_08_27_2025_clickmap_bins.npy +processed_clickmap_bins: jay_imagenet_val_combined_10_15_2025_clickmap_bins.npy chunk_size: 100000 batch_size: 14000 save_json: true diff --git a/prepare_clickmaps.py b/prepare_clickmaps.py index 2d2c2d5..5572c6b 100644 --- a/prepare_clickmaps.py +++ b/prepare_clickmaps.py @@ -482,79 +482,79 @@ np.save(os.path.join(output_dir, config["processed_clickmap_bins"]), clickmap_bins) # Process visualization for display images if needed - if config["display_image_keys"]: - if config["display_image_keys"] == "auto": - sz_dict = {k: len(v) for k, v in final_clickmaps.items()} - arg = np.argsort(list(sz_dict.values())) - config["display_image_keys"] = np.asarray(list(sz_dict.keys()))[arg[-10:]] + # if config["display_image_keys"]: + # if config["display_image_keys"] == "auto": + # sz_dict = {k: len(v) for k, v in final_clickmaps.items()} + # arg = np.argsort(list(sz_dict.values())) + # 
config["display_image_keys"] = np.asarray(list(sz_dict.keys()))[arg[-10:]] - print("Generating visualizations for display images...") - for img_name in config["display_image_keys"]: - # Find the corresponding heatmap - try: - if output_format == "hdf5": - # Read from HDF5 file - with h5py.File(hdf5_path, 'r') as f: - dataset_name = img_name.replace('/', '_') - if dataset_name in f["clickmaps"]: - hmp = f["clickmaps"][dataset_name][:] - # Also read click count if available - count_path = os.path.join(output_dir, config["experiment_name"], f"{img_name.replace('/', '_')}_count.npy") - click_count = np.load(count_path) if os.path.exists(count_path) else None - else: - print(f"Heatmap not found for {img_name}") - continue - else: - # Read from numpy file - heatmap_path = os.path.join(output_dir, config["experiment_name"], f"{img_name.replace('/', '_')}.npy") - if not os.path.exists(heatmap_path): - print(f"Heatmap not found for {img_name}") - continue + # print("Generating visualizations for display images...") + # for img_name in config["display_image_keys"]: + # # Find the corresponding heatmap + # try: + # if output_format == "hdf5": + # # Read from HDF5 file + # with h5py.File(hdf5_path, 'r') as f: + # dataset_name = img_name.replace('/', '_') + # if dataset_name in f["clickmaps"]: + # hmp = f["clickmaps"][dataset_name][:] + # # Also read click count if available + # count_path = os.path.join(output_dir, config["experiment_name"], f"{img_name.replace('/', '_')}_count.npy") + # click_count = np.load(count_path) if os.path.exists(count_path) else None + # else: + # print(f"Heatmap not found for {img_name}") + # continue + # else: + # # Read from numpy file + # heatmap_path = os.path.join(output_dir, config["experiment_name"], f"{img_name.replace('/', '_')}.npy") + # if not os.path.exists(heatmap_path): + # print(f"Heatmap not found for {img_name}") + # continue - hmp = np.load(heatmap_path) - # Try to load click count - count_path = os.path.join(output_dir, config["experiment_name"], f"{img_name.replace('/', '_')}_count.npy") - click_count = np.load(count_path) if os.path.exists(count_path) else None + # hmp = np.load(heatmap_path) + # # Try to load click count + # count_path = os.path.join(output_dir, config["experiment_name"], f"{img_name.replace('/', '_')}_count.npy") + # click_count = np.load(count_path) if os.path.exists(count_path) else None - # If not found, try the dedicated click counts directory - if click_count is None: - count_path = os.path.join(click_counts_dir, f"{img_name.replace('/', '_')}.npy") - click_count = np.load(count_path) if os.path.exists(count_path) else None + # # If not found, try the dedicated click counts directory + # if click_count is None: + # count_path = os.path.join(click_counts_dir, f"{img_name.replace('/', '_')}.npy") + # click_count = np.load(count_path) if os.path.exists(count_path) else None - # Load image - if os.path.exists(os.path.join(config["image_path"], img_name)): - img = Image.open(os.path.join(config["image_path"], img_name)) - elif os.path.exists(os.path.join(config["image_path"].replace(config["file_inclusion_filter"] + os.path.sep, ""), img_name)): - img = Image.open(os.path.join(config["image_path"].replace(config["file_inclusion_filter"] + os.path.sep, ""), img_name)) - elif os.path.exists(os.path.join(config["image_path"].replace(config["file_inclusion_filter"], ""), img_name)): - img = Image.open(os.path.join(config["image_path"].replace(config["file_inclusion_filter"], ""), img_name)) - else: - print(f"Image not found for 
{img_name}") - continue + # # Load image + # if os.path.exists(os.path.join(config["image_path"], img_name)): + # img = Image.open(os.path.join(config["image_path"], img_name)) + # elif os.path.exists(os.path.join(config["image_path"].replace(config["file_inclusion_filter"] + os.path.sep, ""), img_name)): + # img = Image.open(os.path.join(config["image_path"].replace(config["file_inclusion_filter"] + os.path.sep, ""), img_name)) + # elif os.path.exists(os.path.join(config["image_path"].replace(config["file_inclusion_filter"], ""), img_name)): + # img = Image.open(os.path.join(config["image_path"].replace(config["file_inclusion_filter"], ""), img_name)) + # else: + # print(f"Image not found for {img_name}") + # continue - if metadata: - click_match = [k_ for k_ in final_clickmaps.keys() if img_name in k_] - if click_match: - metadata_size = metadata[click_match[0]] - img = img.resize(metadata_size) + # if metadata: + # click_match = [k_ for k_ in final_clickmaps.keys() if img_name in k_] + # if click_match: + # metadata_size = metadata[click_match[0]] + # img = img.resize(metadata_size) - # Save visualization - f = plt.figure() - plt.subplot(1, 2, 1) - plt.imshow(np.asarray(img)) - title = f"{img_name}" - if click_count is not None: - title += f"\nTotal clicks: {click_count}" - plt.title(title) - plt.axis("off") - plt.subplot(1, 2, 2) - plt.imshow(hmp.mean(0)) - plt.axis("off") - plt.savefig(os.path.join(image_output_dir, img_name.replace('/', '_'))) - plt.close() - except Exception as e: - print(f"Error processing {img_name}: {str(e)}") - continue + # # Save visualization + # f = plt.figure() + # plt.subplot(1, 2, 1) + # plt.imshow(np.asarray(img)) + # title = f"{img_name}" + # if click_count is not None: + # title += f"\nTotal clicks: {click_count}" + # plt.title(title) + # plt.axis("off") + # plt.subplot(1, 2, 2) + # plt.imshow(hmp.mean(0)) + # plt.axis("off") + # plt.savefig(os.path.join(image_output_dir, img_name.replace('/', '_'))) + # plt.close() + # except Exception as e: + # print(f"Error processing {img_name}: {str(e)}") + # continue # End profiling if it was enabled if args.profile: profiler.disable() diff --git a/scripts/compute_floor.sh b/scripts/compute_ceiling_floor.sh similarity index 89% rename from scripts/compute_floor.sh rename to scripts/compute_ceiling_floor.sh index fdd119c..8742968 100644 --- a/scripts/compute_floor.sh +++ b/scripts/compute_ceiling_floor.sh @@ -1,5 +1,5 @@ #!/bin/bash -#SBATCH -J ImgNet_Floor +#SBATCH -J ImgNet_Ceiling_Floor #SBATCH -N 1-1 #SBATCH -n 16 #SBATCH -t 96:00:00 diff --git a/src/utils.py b/src/utils.py index 422dc9a..1501b5c 100644 --- a/src/utils.py +++ b/src/utils.py @@ -458,740 +458,6 @@ def convolve(heatmap, kernel, double_conv=False, device='cpu'): return blurred_heatmap.to(device) # [0] -def process_single_image(image_key, image_trials, image_shape, blur_size, blur_sigma, - min_pixels, min_subjects, center_crop, metadata, blur_sigma_function, - kernel_type, duplicate_thresh, max_kernel_size, blur_kernel, - create_clickmap_func, fast_duplicate_detection, device='cpu'): - """Helper function to process a single image for parallel processing""" - - # Process metadata and create clickmaps - if metadata is not None: - if image_key not in metadata: - # Use provided create_clickmap_func - clickmaps = np.asarray([create_clickmap_func([trials], image_shape) for trials in image_trials]) - clickmaps = torch.from_numpy(clickmaps).float().unsqueeze(1).to(device) - if kernel_type == "gaussian": - clickmaps = convolve(clickmaps, blur_kernel) - elif 
kernel_type == "circle": - clickmaps = convolve(clickmaps, blur_kernel, double_conv=True) - else: - native_size = metadata[image_key] - short_side = min(native_size) - scale = short_side / min(image_shape) - adj_blur_size = int(np.round(blur_size * scale)) - if not adj_blur_size % 2: - adj_blur_size += 1 - adj_blur_size = min(adj_blur_size, max_kernel_size) - adj_blur_sigma = blur_sigma_function(adj_blur_size) - # Use provided create_clickmap_func - clickmaps = np.asarray([create_clickmap_func([trials], native_size[::-1]) for trials in image_trials]) - clickmaps = torch.from_numpy(clickmaps).float().unsqueeze(1).to(device) - if kernel_type == "gaussian": - adj_blur_kernel = gaussian_kernel(adj_blur_size, adj_blur_sigma, device) - clickmaps = convolve(clickmaps, adj_blur_kernel) - elif kernel_type == "circle": - adj_blur_kernel = circle_kernel(adj_blur_size, adj_blur_sigma, device) - clickmaps = convolve(clickmaps, adj_blur_kernel, double_conv=True) - else: - # Use provided create_clickmap_func - clickmaps = np.asarray([create_clickmap_func([trials], image_shape) for trials in image_trials]) - clickmaps = torch.from_numpy(clickmaps).float().unsqueeze(1).to(device) - if kernel_type == "gaussian": - clickmaps = convolve(clickmaps, blur_kernel) - elif kernel_type == "circle": - clickmaps = convolve(clickmaps, blur_kernel, double_conv=True) - - if center_crop: - clickmaps = tvF.resize(clickmaps, min(image_shape)) - clickmaps = tvF.center_crop(clickmaps, center_crop) - clickmaps = clickmaps.squeeze().numpy() - - # Filter processing - if len(clickmaps.shape) == 2: # Single map - return None - - # Filter 1: Remove empties - empty_check = (clickmaps > 0).sum((1, 2)) > min_pixels - clickmaps = clickmaps[empty_check] - if len(clickmaps) < min_subjects: - return None - - # Filter 2: Remove duplicates using provided fast_duplicate_detection - clickmaps_vec = clickmaps.reshape(len(clickmaps), -1) - - # Use the function passed as argument - non_duplicate_indices = fast_duplicate_detection(clickmaps_vec, duplicate_thresh) - clickmaps = clickmaps[non_duplicate_indices] - - # dm = cdist(clickmaps_vec, clickmaps_vec) - # idx = np.tril_indices(len(dm), k=-1) - # lt_dm = dm[idx] - # if np.any(lt_dm < duplicate_thresh): - # remove = np.unique(np.where((dm + np.eye(len(dm)) == 0)))[0] - # rng = np.arange(len(dm)) - # dup_idx = rng[~np.in1d(rng, remove)] - # clickmaps = clickmaps[dup_idx] - - if len(clickmaps) >= min_subjects: - return (image_key, clickmaps) - return None - -def prepare_maps_batched_gpu( - final_clickmaps, - blur_size, - blur_sigma, - image_shape, - min_pixels, - min_subjects, - center_crop, - metadata=None, - blur_sigma_function=None, - kernel_type="circle", - duplicate_thresh=0.01, - max_kernel_size=51, - device='cuda', - batch_size=512, # Reduced from 1024 to 512 - n_jobs=-1, - timeout=600, # Add timeout parameter (10 minutes default) - verbose=True, # Add verbose parameter to control detailed logging - create_clickmap_func=None, - fast_duplicate_detection=None): - """ - Optimized version of prepare_maps that separates CPU and GPU work: - 1. Pre-processes clickmaps in parallel on CPU - 2. Processes batches of blurring on GPU - 3. 
Post-processes results in parallel on CPU - - Args: - final_clickmaps (list): List of dictionaries mapping image keys to clickmap trials - blur_size (int): Size of the blur kernel - blur_sigma (float): Sigma value for the blur kernel - image_shape (list/tuple): Shape of the images [height, width] - min_pixels (int): Minimum number of pixels for a valid map - min_subjects (int): Minimum number of subjects for a valid map - center_crop (list/tuple): Size for center cropping - metadata (dict, optional): Metadata dictionary. Defaults to None. - blur_sigma_function (function, optional): Function to calculate blur sigma. Required. - kernel_type (str, optional): Type of kernel to use. Defaults to "circle". - duplicate_thresh (float, optional): Threshold for duplicate detection. Defaults to 0.01. - max_kernel_size (int, optional): Maximum kernel size. Defaults to 51. - device (str, optional): Device to use for GPU operations. Defaults to 'cuda'. - batch_size (int, optional): Batch size for GPU processing. Defaults to 512. - n_jobs (int, optional): Number of parallel jobs for CPU operations. Defaults to -1. - timeout (int, optional): Timeout in seconds for parallel jobs. Defaults to 600. - verbose (bool): Whether to show detailed progress logging - create_clickmap_func (function): Function to create the initial clickmap - fast_duplicate_detection (function): Function for duplicate detection - - Returns: - tuple: (new_final_clickmaps, all_clickmaps, categories, keep_index) - """ - import torch - import torch.nn.functional as F - from joblib import Parallel, delayed - from scipy.spatial.distance import cdist - import numpy as np - from tqdm import tqdm - import torchvision.transforms.functional as tvF - - assert blur_sigma_function is not None, "Blur sigma function not passed." - # Check if functions were passed - assert create_clickmap_func is not None, "create_clickmap function must be provided." - assert fast_duplicate_detection is not None, "fast_duplicate_detection function must be provided." - - # Step 1: Create kernels on GPU - if kernel_type == "gaussian": - blur_kernel = gaussian_kernel(blur_size, blur_sigma, device) - elif kernel_type == "circle": - blur_kernel = circle_kernel(blur_size, blur_sigma, device) - else: - raise NotImplementedError(kernel_type) - - # We'll store all results here - all_final_results = { - 'all_clickmaps': [], - 'categories': [], - 'keep_index': [], - 'new_final_clickmaps': {} - } - - # FIX: More carefully merge dictionaries to avoid mixing maps from different images - # We use a dict to track the source of each clickmap to ensure we're not mixing maps - merged_clickmaps = {} - image_sources = {} # Track which dict each image came from - map_counts_before = {} # Track number of maps before merging - map_counts_after = {} # Track number of maps after merging - - for dict_idx, clickmap_dict in enumerate(final_clickmaps): - # Count maps in this dictionary - for image_key, maps in clickmap_dict.items(): - if image_key not in map_counts_before: - map_counts_before[image_key] = 0 - map_counts_before[image_key] += len(maps) - - for image_key, maps in clickmap_dict.items(): - if image_key in merged_clickmaps: - # If this image already exists, we need to make sure we're not - # accidentally mixing maps from different images - print(f"Warning: Image {image_key} found in multiple dictionaries. 
Combining maps.") - # Append maps while preserving the source tracking - merged_clickmaps[image_key].extend(maps) - if isinstance(image_sources[image_key], list): - image_sources[image_key].append(dict_idx) - else: - image_sources[image_key] = [image_sources[image_key], dict_idx] - else: - # First occurrence of this image - merged_clickmaps[image_key] = maps - image_sources[image_key] = dict_idx - - # Count maps after merging - for image_key, maps in merged_clickmaps.items(): - map_counts_after[image_key] = len(maps) - - # Log if we found any duplicate keys across dictionaries - duplicate_keys = [k for k, v in image_sources.items() if isinstance(v, list)] - if duplicate_keys: - print(f"Found {len(duplicate_keys)} images with maps in multiple dictionaries. These have been properly combined.") - - # Extra verification in verbose mode - if verbose: - print("\nVerification of map combining:") - for key in duplicate_keys: - if map_counts_before[key] != map_counts_after[key]: - print(f" ERROR: Map count mismatch for {key}: Before={map_counts_before[key]}, After={map_counts_after[key]}") - else: - print(f" OK: Successfully combined {map_counts_after[key]} maps for {key}") - print("") - - # Step 2: Get all keys and prepare for batch processing - all_keys = list(merged_clickmaps.keys()) - total_images = len(all_keys) - - # Calculate number of batches based on total unique images - # Set more conservative batch sizes for stability - # cpu_batch_size = min(batch_size, 5000) # Cap at 5000 for stability - cpu_batch_size = batch_size - num_cpu_batches = (total_images + cpu_batch_size - 1) // cpu_batch_size - effective_n_jobs = min(n_jobs if n_jobs > 0 else os.cpu_count(), os.cpu_count(), 16) # Cap at 16 workers - - print(f"Processing {total_images} unique images in {num_cpu_batches} CPU batches (GPU batch size: {batch_size})...") - print(f"Using {effective_n_jobs} parallel jobs for CPU pre/post-processing.") - - # Process each batch of images - processed_count = 0 - with tqdm(total=total_images, desc="Processing Image Batches") as pbar: - for cpu_batch_idx in range(num_cpu_batches): - batch_start = cpu_batch_idx * cpu_batch_size - batch_end = min(batch_start + cpu_batch_size, total_images) - - # print(f"\n│ ├─ Processing CPU batch {cpu_batch_idx+1}/{num_cpu_batches} (images {batch_start}-{batch_end})...") - - # Get keys for this batch - batch_keys = all_keys[batch_start:batch_end] - - # Step 3: Pre-process only this batch of clickmaps in parallel on CPU - # print(f"│ │ ├─ Pre-processing clickmaps on CPU (parallel, n_jobs={effective_n_jobs})...") - - def preprocess_clickmap(image_key, image_trials, image_shape, metadata=None): - """Helper function to pre-process a clickmap before GPU processing""" - # Process metadata and create clickmaps (creates binary maps, no blurring) - # Ensure image_shape is a tuple as required by create_clickmap_func - image_shape_tuple = tuple(image_shape) if isinstance(image_shape, list) else image_shape - - if metadata is not None and image_key in metadata: - native_size = metadata[image_key] - # Use provided create_clickmap_func - clickmaps = np.asarray([create_clickmap_func([trials], native_size[::-1]) for trials in image_trials]) - return { - 'key': image_key, - 'clickmaps': clickmaps, - 'native_size': native_size if image_key in metadata else None - } - else: - # Use provided create_clickmap_func - clickmaps = np.asarray([create_clickmap_func([trials], image_shape_tuple) for trials in image_trials]) - return { - 'key': image_key, - 'clickmaps': clickmaps, - 'native_size': 
None - } - - # Use parallel processing for pre-processing only this batch - # Set a timeout for workers to avoid indefinite hanging - preprocessed = Parallel(n_jobs=effective_n_jobs, timeout=timeout)( - delayed(preprocess_clickmap)( - key, - merged_clickmaps[key], - image_shape, - metadata - ) for key in tqdm(batch_keys, desc="Pre-processing", total=len(batch_keys), leave=False) - ) - - # Only keep non-empty preprocessed data - preprocessed = [p for p in preprocessed if p is not None and len(p['clickmaps']) > 0] - - # Step 4: Process GPU blurring - # print(f"│ │ ├─ Processing blurring on GPU (batch_size={batch_size})...") - - # Process in smaller GPU sub-batches to prevent OOM errors - gpu_batch_size = batch_size # min(batch_size, 256) # Cap at 256 to prevent OOM errors - batch_results = [] - - # Flatten the list of clickmaps for efficient GPU batching - gpu_processing_list = [] - for item in preprocessed: - # Each item in preprocessed has a list of clickmaps for one image - # We need to process each clickmap individually on the GPU eventually - gpu_processing_list.append(item) - - # Process GPU batches with a progress bar - total_gpu_batches = (len(gpu_processing_list) + gpu_batch_size - 1) // gpu_batch_size - if verbose: - print(f"Processing {len(gpu_processing_list)} images in {total_gpu_batches} GPU batches (size: {gpu_batch_size})...") - - with tqdm(total=total_gpu_batches, desc="GPU batches", leave=False) as gpu_batch_pbar: - for gpu_batch_idx in range(0, len(gpu_processing_list), gpu_batch_size): - - # Log current batch information - batch_start = gpu_batch_idx - batch_end = min(gpu_batch_idx + gpu_batch_size, len(gpu_processing_list)) - current_batch_size = batch_end - batch_start - - # Get smaller sub-batch to process - gpu_batch_items = gpu_processing_list[gpu_batch_idx : gpu_batch_idx + gpu_batch_size] - - # Skip empty batches - if not gpu_batch_items: - gpu_batch_pbar.update(1) - continue - - # Log tensor preparation step - if verbose: - print(f" │ ├─ Preparing tensors for {len(gpu_batch_items)} images...") - - # Prepare tensors for this GPU batch - tensors_to_blur = [] - metadata_for_batch = [] - keys_for_batch = [] - map_counts = [] # Track how many maps belong to each original image key - - for item in gpu_batch_items: - key = item['key'] - clickmaps_np = item['clickmaps'] - native_size = item['native_size'] - - # Convert numpy arrays to PyTorch tensors - # Important: Keep track of how many maps belong to this key - num_maps_for_key = len(clickmaps_np) - if num_maps_for_key > 0: - clickmaps_tensor = torch.from_numpy(clickmaps_np).float().unsqueeze(1).to(device) - tensors_to_blur.append(clickmaps_tensor) - # Repeat metadata for each map belonging to this key - metadata_for_batch.extend([(key, native_size)] * num_maps_for_key) - keys_for_batch.extend([key] * num_maps_for_key) - map_counts.append(num_maps_for_key) - - if not tensors_to_blur: - if verbose: - print(f" │ ├─ No valid tensors to process, skipping batch") - gpu_batch_pbar.update(1) - continue - - # Log batch tensor creation - if verbose: - print(f" │ ├─ Concatenating {len(tensors_to_blur)} tensors with {sum(map_counts)} total maps...") - - # Concatenate tensors for efficient batch processing - batch_tensor = torch.cat(tensors_to_blur, dim=0) - - # Log tensor shape for debugging - if verbose: - print(f" │ ├─ Batch tensor shape: {batch_tensor.shape}, processing blurring...") - - # Clear up memory - del tensors_to_blur - torch.cuda.empty_cache() - - # Apply blurring (needs to handle potential metadata variations within 
batch) - blurred_batch = torch.zeros_like(batch_tensor) - current_idx = 0 - - # Apply blurring with a more memory-efficient approach - sub_batch_size = 100 # Process in small sub-batches for stability - if verbose and len(gpu_batch_items) > 1: - print(f" │ ├─ Processing {len(gpu_batch_items)} image items in batches of {sub_batch_size}...") - - for item_idx, item in enumerate(tqdm(gpu_batch_items, desc="Blurring items", leave=False, disable=not verbose)): - # Apply blurring based on the specific item's metadata - key = item['key'] - num_maps = len(item['clickmaps']) - native_size = item['native_size'] - - if num_maps == 0: - continue - - item_tensor = batch_tensor[current_idx : current_idx + num_maps] - import pdb; pdb.set_trace() - try: - # Process with proper error handling - if native_size is not None: - short_side = min(native_size) - scale = short_side / min(image_shape) - adj_blur_size = int(np.round(blur_size * scale)) - if not adj_blur_size % 2: - adj_blur_size += 1 - adj_blur_size = min(adj_blur_size, max_kernel_size) - adj_blur_sigma = blur_sigma_function(adj_blur_size) - - if kernel_type == "gaussian": - adj_blur_kernel = gaussian_kernel(adj_blur_size, adj_blur_sigma, device) - blurred_item = convolve(item_tensor, adj_blur_kernel) - elif kernel_type == "circle": - adj_blur_kernel = circle_kernel(adj_blur_size, adj_blur_sigma, device) - blurred_item = convolve(item_tensor, adj_blur_kernel, double_conv=True) - - # Free memory for next iteration - if 'adj_blur_kernel' in locals(): - del adj_blur_kernel - else: - # Use the standard kernel - if kernel_type == "gaussian": - blurred_item = convolve(item_tensor, blur_kernel) - elif kernel_type == "circle": - blurred_item = convolve(item_tensor, blur_kernel, double_conv=True) - - blurred_batch[current_idx : current_idx + num_maps] = blurred_item - - # Free memory - del blurred_item - except Exception as e: - if verbose: - print(f" │ ├─ ERROR processing item {item_idx} (key: {key}): {e}") - # In case of error, just keep original - blurred_batch[current_idx : current_idx + num_maps] = item_tensor - - current_idx += num_maps - - # Periodically clear cache - if item_idx % 50 == 0: - torch.cuda.empty_cache() - - # Log center crop info if applicable - if center_crop and verbose: - print(f" │ ├─ Applying center crop from {blurred_batch.shape[-2:]} to {center_crop}...") - - # Apply center crop if needed (applied to the whole batch) - if center_crop: - # Resize first if dimensions are different - if blurred_batch.shape[-2:] != image_shape: - blurred_batch = tvF.resize(blurred_batch, list(image_shape), antialias=True) - blurred_batch = tvF.center_crop(blurred_batch, list(center_crop)) - - # Log conversion to numpy - if verbose: - print(f" │ ├─ Converting to numpy and organizing results...") - - # Convert back to numpy and store results indexed by key - processed_maps_np = blurred_batch.squeeze(1).cpu().numpy() - - # Reconstruct the results grouped by image key - start_idx = 0 - item_idx = 0 - while start_idx < len(processed_maps_np): - key = keys_for_batch[start_idx] - num_maps = map_counts[item_idx] - end_idx = start_idx + num_maps - batch_results.append({ - 'key': key, - 'clickmaps': processed_maps_np[start_idx:end_idx] - }) - start_idx = end_idx - item_idx += 1 - - # Log memory cleanup - if verbose: - print(f" │ └─ Cleaning up memory...") - - # Free GPU memory - del batch_tensor, blurred_batch, item_tensor - if 'adj_blur_kernel' in locals(): del adj_blur_kernel - # check_gpu_memory_usage(threshold=0.5, force_cleanup=True) - - # Add a small delay 
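The helper removed above stacks every clickmap of a batch into a single [N, 1, H, W] tensor and blurs the whole stack at once instead of looping over maps on the CPU. A minimal sketch of that batched-blur idea, using a hand-rolled Gaussian kernel rather than the repository's gaussian_kernel/convolve utilities (so kernel construction and padding choices here are assumptions, not the project's code):

```python
import torch
import torch.nn.functional as F

def make_gaussian_kernel(size: int = 21, sigma: float = 7.0) -> torch.Tensor:
    # Separable 1-D Gaussian turned into a normalized 2-D kernel of shape [1, 1, size, size].
    ax = torch.arange(size, dtype=torch.float32) - (size - 1) / 2.0
    g1d = torch.exp(-(ax ** 2) / (2 * sigma ** 2))
    k = torch.outer(g1d, g1d)
    return (k / k.sum()).view(1, 1, size, size)

device = "cuda" if torch.cuda.is_available() else "cpu"
maps = torch.rand(32, 1, 256, 256, device=device)   # stand-in for a stack of binary clickmaps
kernel = make_gaussian_kernel().to(device)
# One conv2d call blurs all 32 maps; padding by half the kernel width keeps the spatial size.
blurred = F.conv2d(maps, kernel, padding=kernel.shape[-1] // 2)
```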
-# Custom wrapper for prepare_maps_batched_gpu with progress display
-def prepare_maps_with_gpu_batching(final_clickmaps, **kwargs):
-    """
-    Wrapper for prepare_maps_batched_gpu that displays progress and follows
-    the same signature as prepare_maps_with_progress for easy swapping.
-
-    This version optimizes processing by:
-    1. Pre-processing clickmaps in parallel on CPU
-    2. Processing batches of blurring operations on GPU
-    3. Post-processing results in parallel on CPU
-
-    Args:
-        final_clickmaps (list): List of dictionaries mapping image keys to clickmap trials
-        **kwargs: Additional arguments to pass to prepare_maps_batched_gpu
-
-    Returns:
-        tuple: (new_final_clickmaps, all_clickmaps, categories, keep_index)
-    """
-    batch_size = kwargs.pop('batch_size', 512) # Default batch size of 512
-    verbose = kwargs.pop('verbose', True) # Add verbose parameter, default to True
-
-    # Display more information if verbose
-    if verbose:
-        print(f"│ ├─ Processing with GPU-optimized batching (batch_size={batch_size})...")
-
-    # Pass the required functions from kwargs
-    create_clickmap_func = kwargs.get('create_clickmap_func')
-    fast_duplicate_detection = kwargs.get('fast_duplicate_detection')
-
-    return prepare_maps_batched_gpu(
-        final_clickmaps=final_clickmaps,
-        batch_size=batch_size,
-        verbose=verbose, # Pass verbose parameter
-        create_clickmap_func=create_clickmap_func,
-        fast_duplicate_detection=fast_duplicate_detection,
-        **{k: v for k, v in kwargs.items() if k not in ('create_clickmap_func', 'fast_duplicate_detection', 'batch_size', 'verbose')}
-    )
-
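The CPU stages summarized in the wrapper's docstring (pre-processing and post-processing) fan work out with joblib and bound each task with a timeout so a hung worker raises instead of stalling the run. A stripped-down version of that pattern; square here is only a stand-in for preprocess_clickmap/postprocess_clickmap:

```python
from joblib import Parallel, delayed

def square(x):
    return x * x

# n_jobs fans tasks out across worker processes; timeout limits how long any
# single task may take before joblib raises a TimeoutError.
results = Parallel(n_jobs=4, timeout=600)(delayed(square)(i) for i in range(100))
print(results[:5])
```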
-# GPU-accelerated correlation metrics
-def compute_AUC_gpu(pred, target, device='cuda'):
-    """
-    GPU-accelerated implementation of AUC score computation.
-
-    Args:
-        pred (np.ndarray): Predicted heatmap
-        target (np.ndarray): Target heatmap
-        device (str): Device to run computation on ('cuda' or 'cpu')
-
-    Returns:
-        float: AUC score
-    """
-    import torch
-    from sklearn import metrics
-
-    # Flatten arrays
-    pred_flat = pred.flatten()
-    target_flat = target.flatten()
-
-    # Convert to PyTorch tensors
-    pred_tensor = torch.tensor(pred_flat, device=device)
-    target_tensor = torch.tensor(target_flat, device=device)
-
-    # Create a binary mask of non-zero target pixels
-    mask = target_tensor > 0
-
-    # If no positive pixels, return 0.5 (random chance)
-    if not torch.any(mask):
-        return 0.5
-
-    # Get masked predictions and binary ground truth
-    masked_pred = pred_tensor[mask].cpu().numpy()
-    masked_target = torch.ones_like(target_tensor[mask]).cpu().numpy()
-
-    # Get an equal number of negative samples
-    neg_mask = ~mask
-    if torch.sum(neg_mask) > 0:
-        # Select same number of negative samples as positive ones
-        num_pos = torch.sum(mask).item()
-        num_neg = min(num_pos, torch.sum(neg_mask).item())
-
-        # Get indices of negative samples
-        neg_indices = torch.nonzero(neg_mask).squeeze()
-        if neg_indices.numel() > 0:
-            if neg_indices.numel() > num_neg:
-                # Random sample negative indices if we have more than we need
-                perm = torch.randperm(neg_indices.numel(), device=device)
-                neg_indices = neg_indices[perm[:num_neg]]
-
-            # Get predictions for negative samples and set target to 0
-            neg_pred = pred_tensor[neg_indices].cpu().numpy()
-            neg_target = torch.zeros(neg_indices.numel()).numpy()
-
-            # Combine positive and negative samples
-            masked_pred = np.concatenate([masked_pred, neg_pred])
-            masked_target = np.concatenate([masked_target, neg_target])
-
-    # Compute AUC score
-    try:
-        return metrics.roc_auc_score(masked_target, masked_pred)
-    except ValueError:
-        # In case of errors, fallback to 0.5
-        return 0.5
-
-def compute_spearman_correlation_gpu(pred, target, device='cuda'):
-    """
-    GPU-accelerated implementation of Spearman correlation computation.
-
-    Args:
-        pred (np.ndarray): Predicted heatmap
-        target (np.ndarray): Target heatmap
-        device (str): Device to run computation on ('cuda' or 'cpu')
-
-    Returns:
-        float: Spearman correlation coefficient
-    """
-    import torch
-
-    # Flatten arrays
-    pred_flat = pred.flatten()
-    target_flat = target.flatten()
-
-    # Convert to PyTorch tensors
-    pred_tensor = torch.tensor(pred_flat, device=device)
-    target_tensor = torch.tensor(target_flat, device=device)
-
-    # Compute ranks
-    pred_rank = torch.argsort(torch.argsort(pred_tensor)).float()
-    target_rank = torch.argsort(torch.argsort(target_tensor)).float()
-
-    # Compute mean ranks
-    pred_mean = torch.mean(pred_rank)
-    target_mean = torch.mean(target_rank)
-
-    # Compute numerator and denominator
-    numerator = torch.sum((pred_rank - pred_mean) * (target_rank - target_mean))
-    denominator = torch.sqrt(torch.sum((pred_rank - pred_mean)**2) * torch.sum((target_rank - target_mean)**2))
-
-    # Compute correlation
-    if denominator > 0:
-        correlation = numerator / denominator
-        return correlation.cpu().item()
-    else:
-        return 0.0
-
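One caveat with the rank computation above: torch.argsort(torch.argsort(x)) produces ordinal ranks, so tied values (for example the many zero pixels in a sparse clickmap) receive arbitrary distinct ranks, whereas scipy.stats.spearmanr assigns tied values their average rank. That difference may be one reason the batched helper below falls back to SciPy for Spearman. A small standalone check, not part of the repository:

```python
import numpy as np
import torch
from scipy.stats import spearmanr

x = np.array([0.0, 0.0, 0.0, 0.2, 0.9, 0.5])   # repeated zeros create ties
y = np.array([0.0, 0.1, 0.0, 0.3, 0.8, 0.8])

# Ordinal ranks via double argsort, as in the deleted GPU implementation.
rx = torch.argsort(torch.argsort(torch.tensor(x))).double()
ry = torch.argsort(torch.argsort(torch.tensor(y))).double()
rank_pearson = np.corrcoef(rx.numpy(), ry.numpy())[0, 1]

# SciPy averages tied ranks, so the two estimates typically differ when ties exist.
rho, _ = spearmanr(x, y)
print(rank_pearson, rho)
```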
-def compute_crossentropy_gpu(pred, target, eps=1e-10, device='cuda'):
-    """
-    GPU-accelerated implementation of cross-entropy computation.
-
-    Args:
-        pred (np.ndarray): Predicted heatmap
-        target (np.ndarray): Target heatmap
-        eps (float): Small value to avoid numerical issues
-        device (str): Device to run computation on ('cuda' or 'cpu')
-
-    Returns:
-        float: Cross-entropy loss
-    """
-    import torch
-    import torch.nn.functional as F
-
-    # Convert to PyTorch tensors
-    pred_tensor = torch.tensor(pred, device=device).float()
-    target_tensor = torch.tensor(target, device=device).float()
-
-    # Normalize target to sum to 1
-    target_sum = torch.sum(target_tensor)
-    if target_sum > 0:
-        target_tensor = target_tensor / target_sum
-
-    # Normalize prediction to sum to 1
-    pred_sum = torch.sum(pred_tensor)
-    if pred_sum > 0:
-        pred_tensor = pred_tensor / pred_sum
-
-    # Add small epsilon to avoid log(0)
-    pred_tensor = torch.clamp(pred_tensor, min=eps)
-
-    # Compute cross-entropy loss
-    loss = -torch.sum(target_tensor * torch.log(pred_tensor))
-
-    return loss.cpu().item()
-
-# Function to process a batch of correlation computations on GPU
-def batch_compute_correlations_gpu(test_maps, reference_maps, metric='auc', device='cuda'):
-    """
-    Process a batch of correlation computations on GPU for improved performance.
-    For Spearman correlation, uses scipy's implementation instead of the GPU version
-
-    Args:
-        test_maps (list): List of test maps
-        reference_maps (list): List of reference maps
-        metric (str): Metric to use ('auc', 'spearman', 'crossentropy')
-        device (str): Device to run computation on ('cuda' or 'cpu')
-
-    Returns:
-        list: Correlation scores for each pair of maps
-    """
-    assert len(test_maps) == len(reference_maps), "Number of test and reference maps must match"
-    from scipy.stats import spearmanr
-
-    results = []
-    for test_map, reference_map in zip(test_maps, reference_maps):
-        # Normalize maps
-        test_map = (test_map - test_map.min()) / (test_map.max() - test_map.min() + 1e-10)
-        reference_map = (reference_map - reference_map.min()) / (reference_map.max() - reference_map.min() + 1e-10)
-
-        # Compute correlation using appropriate function
-        if metric.lower() == 'auc':
-            score = compute_AUC_gpu(test_map, reference_map, device)
-        elif metric.lower() == 'spearman':
-            # Use scipy's spearman implementation instead of GPU version
-            score, _ = spearmanr(test_map.flatten(), reference_map.flatten())
-        elif metric.lower() == 'crossentropy':
-            score = compute_crossentropy_gpu(test_map, reference_map, device)
-        else:
-            raise ValueError(f"Unsupported metric: {metric}")
-
-        results.append(score)
-
-    return results
 
 def save_single_clickmap(all_clickmaps, idx, img_name, image_path, file_inclusion_filter=None, save_dir=None):
     """Helper function to save a single clickmap"""
@@ -1801,7 +1067,9 @@ def process_all_maps_multi_thresh_gpu(
             clickmap_bins[key] = np.asarray(bin_counts)
             # Add to all_clickmaps with the appropriate method
             if save_to_disk:
+                key = key.replace('/', '_')
                 temp_group.create_dataset(key, data=np.stack(bin_clickmaps, axis=0))
+
             elif return_before_blur:
                 bin_clickmaps = np.stack(bin_clickmaps, axis=0)
                 if max_subjects > 0:
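The added key = key.replace('/', '_') line matters because h5py interprets '/' in a dataset name as a group separator, so an un-sanitized "category/filename" key would be written as a nested group rather than a flat dataset. A standalone illustration of that behavior (the file name and keys here are made up for the example):

```python
import h5py
import numpy as np

with h5py.File("demo.h5", "w") as f:
    # A key containing "/" creates an intermediate group automatically.
    f.create_dataset("n01440764/img_0001", data=np.zeros((4, 4)))
    # A sanitized key stays at the top level of the file.
    f.create_dataset("n01440764_img_0002", data=np.zeros((4, 4)))
    print(list(f.keys()))        # ['n01440764', 'n01440764_img_0002']
    print(list(f["n01440764"]))  # ['img_0001'], nested one level down
```

Flattening the key keeps every clickmap at the top level of the temporary group, which matches how tools/find_top_bottom.py looks entries up after replacing '/' with '_'.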
"/gpfs/data/shared/imagenet/ILSVRC2012/val" image_output_dir = "temp/top_bot_imgs_30" os.makedirs(image_output_dir, exist_ok=True) with open(scores_json, 'r') as f: scores_dict = json.load(f)['all_img_ceilings'] metadata = np.load("image_metadata/jay_imagenet_val_04_30_2025_dimensions.npy", allow_pickle=True).item() - val_map_files = ['assets/imgnet_val/jay_imagenet_val_08_27_2025_batch001.h5', 'assets/imgnet_val/jay_imagenet_val_08_27_2025_batch002.h5', - 'assets/imgnet_val/jay_imagenet_val_08_27_2025_batch003.h5', 'assets/imgnet_val/jay_imagenet_val_08_27_2025_batch004.h5'] + val_map_files = ['assets/imgnet/jay_imagenet_val_08_27_2025_batch001.h5', 'assets/imgnet/jay_imagenet_val_08_27_2025_batch002.h5', + 'assets/imgnet/jay_imagenet_val_08_27_2025_batch003.h5', 'assets/imgnet/jay_imagenet_val_08_27_2025_batch004.h5'] top10 = dict(sorted(scores_dict.items(), key=lambda x: x[1], reverse=True)[:10]) bot10 = dict(sorted(scores_dict.items(), key=lambda x: x[1], reverse=False)[:10]) top10_maps = {} @@ -54,13 +54,16 @@ def plot_clickmap(img, hmp, score, num_subjects, img_name, image_output_dir): for map_file in val_map_files: map_content = h5py.File(map_file, 'r')['clickmaps'] for img_name in top10: + img_key = img_name.split('/')[1] img_name = img_name.replace('/', '_') if img_name in map_content: - top10_maps[img_name] = map_content[img_name]['clickmap'][:].mean(0) + top10_maps[img_key] = map_content[img_name]['clickmap'][:].mean(0) for bot_img_name in bot10: + print(bot_img_name) + img_key = bot_img_name.split('/')[1] bot_img_name = bot_img_name.replace('/', '_') if bot_img_name in map_content: - bot10_maps[img_name] = map_content[bot_img_name]['clickmap'][:].mean(0) + bot10_maps[img_key] = map_content[bot_img_name]['clickmap'][:].mean(0) top10_paths = [] bot10_paths = [] diff --git a/tools/verify_list.py b/tools/verify_list.py new file mode 100644 index 0000000..2c52c3a --- /dev/null +++ b/tools/verify_list.py @@ -0,0 +1,13 @@ +import json +import os + +if __name__ == "__main__": + img_list = "clickme_datasets/missing_imgnet_train.json" + imgnet_path = "/gpfs/data/shared/imagenet/ILSVRC2012/train" + with open(img_list, 'r') as f: + json_content = json.load(f) + + for img_cls, img_list in json_content.items(): + for img in img_list: + if not os.path.exists(os.path.join(imgnet_path, img_cls, img)): + print("Missing", img) \ No newline at end of file