Skip to content

Commit 3755945

Browse files
committed
Prune oldest ECR images if repository gets too large
1 parent d37312a commit 3755945

1 file changed

Lines changed: 60 additions & 9 deletions

File tree

.github/workflows/ci.yml

Lines changed: 60 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -48,9 +48,9 @@ jobs:
4848
- name: Determine version
4949
id: version
5050
run: |
51-
set -e
52-
VERSION="$(date -u '+%Y%m%d-%H%M%S')"
53-
echo "version=$VERSION" >> "$GITHUB_OUTPUT"
51+
set -e;
52+
VERSION="$(date -u '+%Y%m%d-%H%M%S')";
53+
echo "version=$VERSION" >> "$GITHUB_OUTPUT";
5454
echo "Version $VERSION";
5555
- name: Checkout
5656
uses: actions/checkout@v6
@@ -67,9 +67,9 @@ jobs:
6767
run: PARALLEL_BUILD=false PARALLEL_E2E=false EXPLICIT_WAIT_TIMEOUT=20000 TEST_TIMEOUT=60000 npm test
6868
- name: Bundle
6969
run: |
70-
cd build
71-
rm -r node_modules
72-
tar -czf ../build.tar.gz .
70+
cd build;
71+
rm -r node_modules;
72+
tar -czf ../build.tar.gz .;
7373
- name: Upload Bundle
7474
uses: actions/upload-artifact@v5
7575
with:
@@ -200,9 +200,60 @@ jobs:
200200
- name: Prune old ECR images
201201
# work around lack of support for lifecycle policies in ECR Public (see https://github.com/aws/containers-roadmap/issues/1268)
202202
run: |
203-
set -e
204-
# temporary: for now, just print out the information from the repository
205-
aws ecr-public describe-images --repository-name refacto --output json
203+
set -e;
204+
cat >task.mjs <<"EOF"
205+
// Prune ECR Public images: delete untagged image indexes older than one day,
// then prune the oldest tagged images once the repository exceeds the size
// limit. Reads `aws ecr-public describe-images --output json` from stdin;
// argv[2] is the path to the aws CLI binary.
import { json } from 'node:stream/consumers';
import { spawnSync } from 'node:child_process';

const untaggedThresholdTime = Date.now() - 1000 * 60 * 60 * 24; // 1 day
const repositorySizeLimitBytes = 20 * 1024 * 1024 * 1024; // 20GB (ECR Public free tier max across all repositories is 50GB)

const aws = process.argv[2];
const input = await json(process.stdin);
// Only consider OCI image indexes; tolerate an empty repository
// (imageDetails missing/empty) and items without a manifest media type.
const indexItems = (input.imageDetails ?? []).filter((item) => item.imageManifestMediaType?.includes('image.index.v1'));
console.log(`total index items: ${indexItems.length}`);

for (const item of indexItems) {
  item.imagePushedTimestamp = Date.parse(item.imagePushedAt);
}
indexItems.sort((a, b) => b.imagePushedTimestamp - a.imagePushedTimestamp); // sort newest first

let totalSize = 0;
const toDelete = [];
for (const item of indexItems) {
  if (!item.imageTags?.length && item.imagePushedTimestamp < untaggedThresholdTime) {
    // untagged image index: obvious candidate for deletion
    // (not counted towards totalSize since it is about to be removed)
    console.log(`untagged image: ${item.imageDigest}`);
    toDelete.push(item.imageDigest);
    continue;
  }
  totalSize += item.imageSizeInBytes;
  if (item.imageTags?.includes('latest')) {
    // never remove image tagged as 'latest'
    continue;
  }
  if (totalSize > repositorySizeLimitBytes) {
    // max repository size exceeded: prune oldest images
    console.log(`old image: ${item.imageDigest}`);
    toDelete.push(item.imageDigest);
  }
}

// batch-delete-image accepts at most 100 image IDs per call
while (toDelete.length > 0) {
  const batch = toDelete.splice(0, 100);
  console.log('deleting batch:', batch);
  // Each image ID must be its own shorthand argument (`imageDigest=a imageDigest=b`).
  // Joining them with commas would be parsed as ONE structure with a duplicated
  // `imageDigest` key, so at most a single image per batch would be deleted.
  const imageIds = batch.map((digest) => `imageDigest=${digest}`);
  const result = spawnSync(
    aws,
    ['ecr-public', 'batch-delete-image', '--repository-name', 'refacto', '--image-ids', ...imageIds],
    { stdio: ['ignore', 'inherit', 'inherit'] },
  );
  if (result.status !== 0) {
    console.log(`batch delete failed (status ${result.status}, signal ${result.signal}, error ${result.error})`);
    process.exit(1);
  }
}
255+
EOF
256+
aws ecr-public describe-images --repository-name refacto --output json | node task.mjs "$(which aws)";
206257
207258
create_github_release:
208259
needs:

0 commit comments

Comments
 (0)