-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathproject-airogs.html
More file actions
582 lines (498 loc) · 36 KB
/
project-airogs.html
File metadata and controls
582 lines (498 loc) · 36 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
<!DOCTYPE html>
<!-- This site was created in Webflow. http://www.webflow.com -->
<!-- Last Published: Fri Mar 09 2018 17:05:10 GMT+0000 (UTC) -->
<html data-wf-page="59a4444cf0b9de0001da1f74" data-wf-site="59a4444cf0b9de0001da1f73" lang="en">
<head>
<meta charset="utf-8">
<title>Coen de Vente • AIROGS Challenge Report: AI models can be used for glaucoma screening, but do they know when they cannot?</title>
<meta property="og:title" content="Coen de Vente • AIROGS Challenge Report: AI models can be used for glaucoma screening, but do they know when they cannot?" />
<meta content="width=device-width, initial-scale=1" name="viewport">
<meta content="Webflow" name="generator">
<link href="css/normalize.css?1747294411" rel="stylesheet" type="text/css">
<link href="css/webflow.css?1747294411" rel="stylesheet" type="text/css">
<link href="css/websify.webflow.css?1747294411" rel="stylesheet" type="text/css">
<script src="https://ajax.googleapis.com/ajax/libs/webfont/1.4.7/webfont.js" type="text/javascript"></script>
<script type="text/javascript">
// Load the Roboto and Raleway families (all weights/styles used by the
// Webflow stylesheets) via Google's WebFont loader, fetched above.
WebFont.load({
google: {
families: ["Roboto:100,100italic,300,300italic,regular,italic,500,500italic,700,700italic,900,900italic", "Raleway:100,100italic,200,200italic,300,300italic,regular,italic,500,500italic,600,600italic,700,700italic,800,800italic,900,900italic"]
}
});
</script>
<!--[if lt IE 9]><script src="https://cdnjs.cloudflare.com/ajax/libs/html5shiv/3.7.3/html5shiv.min.js" type="text/javascript"></script><![endif]-->
<script type="text/javascript">
// Webflow feature-detection shim (vendored, minified): appends "w-mod-js" to
// the <html> class list (JS is enabled) and "w-mod-touch" when touch events
// are supported, so the exported CSS can target those modes.
! function(o, c) {
var n = c.documentElement,
t = " w-mod-";
n.className += t + "js", ("ontouchstart" in o || o.DocumentTouch && c instanceof DocumentTouch) && (n.className += t + "touch")
}(window, document);
</script>
<link href="images/favicon.png" rel="shortcut icon" type="image/x-icon">
<link href="images/favicon.png" rel="apple-touch-icon">
<!-- Global site tag (gtag.js) - Google Analytics -->
<script async src="https://www.googletagmanager.com/gtag/js?id=G-K0837QE6PJ"></script>
<script>
// Standard Google Analytics (gtag.js) bootstrap for property G-K0837QE6PJ,
// loaded asynchronously by the googletagmanager script tag above.
window.dataLayer = window.dataLayer || [];
function gtag(){dataLayer.push(arguments);}
gtag('js', new Date());
gtag('config', 'G-K0837QE6PJ');
</script>
<script src="https://cdn.jsdelivr.net/npm/lazyload@2.0.0-beta.1/lazyload.js"></script>
<meta name="description" content="Hi! I'm Coen, researcher who is enthusiastic about machine learning, computer vision and health.">
<meta property="og:description" content="Hi! I'm Coen, researcher who is enthusiastic about machine learning, computer vision and health.">
</head>
<body class="body">
<div data-collapse="medium" data-animation="default" data-duration="400" data-ix="changecolorscroll" class="navbar colored w-nav">
<div class="container w-container">
<div class="brand w-nav-brand">
<a href="index.html" class="navlink w-nav-link no-text-transform text-logo">Coen de Vente</a>
</div>
<nav role="navigation" class="nav-menu colored w-nav-menu">
<a href="index.html" class="navlink w-nav-link">Home</a>
<a href="projects.html" class="navlink w-nav-link">Posts</a>
<a href="publications.html" class="navlink w-nav-link">Publications</a>
<!-- <a href="cv.html" class="navlink over w-nav-link">CV</a> -->
<a href="about.html" class="navlink over w-nav-link">About</a>
<a href="contact.html" class="navlink w-nav-link">Contact</a>
</nav>
<div class="menu-button w-nav-button">
<div class="colored hamburgericon w-icon-nav-menu"></div>
</div>
</div>
</div><div class="section-9">
<div style="background-image: url('images/airogs/TRAIN002880_thumb.png'); background-size: cover; padding-bottom: 0px; background-position: right;" class="div-block-27 project-title-wrapper">
<div style="height: 100%; display: flex; flex-direction: column; justify-content: flex-end;" class="row-5 w-row whitetext" >
<h1 style="font-size: 40px; " class="heading-5 zwartehead naastmockup">
<div data-ix="flowinslow" class="column-9 w-col w-col-2 whitetext"></div>
<div data-ix="flowinslow" class="column-9 w-col w-col-8 whitetext project-title">
AIROGS Challenge Report: AI models can be used for glaucoma screening, but do they know when they cannot?
<div class="below-title-wrapper">
<div class="publication-date">
<p><img class="clock-icon" src="images/clock.png" alt=""> 10 May 2022</p>
</div>
<div class="download-pdf-button">
<a href="files/2302.01738.pdf" target="_blank">
<p>
<img class="download-icon" src="images/download.png" alt="">
Preprint (PDF)
</p>
</a>
</div>
</div>
</div>
<div data-ix="flowinslow" class="column-10 w-clearfix w-col w-col-2"></div>
</h1>
</div>
</div>
</div>
<div class="section-8">
<div class="div-block-29">
<div class="div-block-28 bgtarget blurbg"></div>
</div>
<div class="div-block-27 sameheight0source">
<div class="row-5 w-row">
<div data-ix="flowinslow" class="column-10 w-clearfix w-col w-col-2"></div>
<div data-ix="flowinslow" class="column-9 w-col w-col-8">
<p>We recently presented the results of our challenge “AIROGS: Artificial Intelligence for RObust Glaucoma Screening” at ISBI 2022. Participating teams presented their artificial intelligence (AI) solutions for robust glaucoma screening on which they had been working for the past four months. In this blog post, we present the rationale behind the challenge, give an overview of the results and describe our experiences with the Type 2 challenge features that were recently introduced on grand-challenge.org.</p>
<h3>Glaucoma and screening</h3>
<p>Glaucoma is a leading cause of irreversible blindness and impaired vision, <a href="https://pubmed.ncbi.nlm.nih.gov/24974815/">estimated</a> to affect up to 110 million people in 2040. Population studies <a href="https://pubmed.ncbi.nlm.nih.gov/8695555/">showed</a> that over 50% of glaucoma cases are undiagnosed. Moreover, a main cause of blindness is late detection, since the disease can be controlled well when it is diagnosed early on using treatments such as medication, laser and surgery.</p>
<p>Glaucomatous patients can be identified with the use of color fundus photography (CFP). See an example CFP image with the optic disc magnified, below. The analysis of CFP images performed by human experts, however, is a highly costly procedure. AI could increase the cost-effectiveness of glaucoma screening, by reducing the need for this manual labor. Moreover, if automated AI based solutions that can forward glaucomatous patients to ophthalmologists would be integrated in scanners (or even better: smartphone cameras), they could be used at eye shops and opticians without the need for an expert to be present at the site.</p>
<p><img src="images/26ebcfd5-eccb-4a19-968a-6b4b8d35deeb.jpg" width="400" style="margin-left: calc(50% - 200px); max-width: 100%;" alt="Color fundus image with optic disc pointed out."></p>
<h3>From at-the-lab performance to real-world scenarios</h3>
<p>Testing AI screening solutions in real-world settings is highly important, as illustrated in <a href="https://www.technologyreview.com/2020/04/27/1000658/google-medical-ai-accurate-lab-real-life-clinic-covid-diabetes-retina-disease/">this news article</a>, based on <a href="https://dl.acm.org/doi/abs/10.1145/3313831.3376718">this evaluation study</a>. They described observations when deploying an AI model for diabetic retinopathy screening from CFP images in clinics in Thailand.</p>
<blockquote>
<p><em><strong>"It gives guaranteed results, but it has some limitations. Some images are blurry, and I can still read it, but [the system] can’t."</strong></em> <br>
- A participant of the evaluation study who worked with the AI screening tool.</p>
</blockquote>
<blockquote>
<p><em><strong>"It’s good but I think it’s not as accurate. If [the eye] is a little obscured, it can’t grade it."</strong></em> <br>
- Another participant of the evaluation study who worked with the AI screening tool.</p>
</blockquote>
<p>As illustrated by these quotes, the AI quality check was too strict in the opinion of the camera operators and ophthalmic nurses, causing frustration. Moreover, image quality varied highly across locations, settings and operator experience level. So, in real-world settings, different types of image quality can be expected, depending on where and by whom an image is taken.</p>
<p>Therefore, algorithms should be able to tell when it can grade an image or not, even when it has not seen a certain type of ungradability during training. However, it should also not be unnecessarily strict.</p>
<h3>A heterogeneous, multi-center dataset without ungradable training data</h3>
<p>For the AIROGS challenge, we used a large screening dataset of around 113,000 images from about 60,000 patients and approximately 500 different sites with a heterogeneous ethnicity. We split the data into a training set with about 101,000 gradable images (from referable and non-referable glaucomatous eyes) and a closed test set with approximately 11,000 (both gradable and ungradable) images. To encourage the development of methodologies with inherent robustness mechanisms, we did not include ungradable data in the training data, while we did include ungradable data in the test set and evaluated the ability of solutions to distinguish gradable from ungradable images. Furthermore, glaucoma screening performance was assessed by considering the detection performance of referable glaucoma in gradable data.</p>
<p>Labeling was performed by a pool of carefully selected graders. From a larger group of general ophthalmologists, glaucoma specialists, residents in ophthalmology and optometrists who had been trained at optic disc assessment for detecting glaucoma, 89 indicated that they wanted to become a grader for labeling the data set. They were invited for an exam and 32 of them passed with a minimum specificity of 92% and a minimum sensitivity of 85% for detecting glaucoma on fundus photographs.</p>
<p>For each image they were presented with, graders needed to state whether the eye should be referred (referable glaucoma: RG), should not be referred (no referable glaucoma: NRG) or that the image could not be graded (ungradable: U). To ensure a high-quality set of labels, each image was initially graded twice. If the graders agreed, the agreed-on label was the final label. If the graders disagreed, the image was graded by a third grader (one of two experienced glaucoma specialists); that label was the final label. About 2% of all graded images were eventually labeled as ungradable.</p>
<h3>Outcomes</h3>
<p>We were happy to see that people from all over the world joined the AIROGS challenge. 351 participants from 51 countries joined the challenge and 15 teams from 13 countries eventually submitted a solution for the final leaderboard. The world map below depicts for each country how many users joined and submitted to the final leaderboard. Each circle indicates a country and the circle’s size portrays the number of participants from that country.</p>
<p><img src="images/8876d211-9a0a-4fe8-b736-878a83c6f5e5.png" width="100%" alt="Map indicating the number of participants that joined and submitted to the final phase for each country."></p>
<p>Participants were evaluated on two aspects: glaucoma screening performance and robustness. Glaucoma screening performance was measured by the partial area under the receiver operator characteristic curve at 90-100% specificity (pAUC<sub>S</sub>) and the sensitivity at 95% specificity (SE@95SP<sub>S</sub>). We quantified robustness using Cohen's kappa score, i.e. the agreement between the reference and the decisions provided by the challenge participants on image gradability (κ<sub>U</sub>), and the area under the receiver operator characteristic curve with the human reference for ungradability as labels and ungradability scalar values provided by the participants, as the target scores (AUC<sub>U</sub>). A final score, defined as the mean of these four metrics, subsequently determined the final participant ranking.</p>
<p>The best pAUC<sub>S</sub>, SE@95SP<sub>S</sub>, κ<sub>U</sub> and AUC<sub>U</sub> were 0.90, 85%, 0.82 and 0.99, respectively. The four metrics for each participant, along with the final score on the left, are shown here:</p>
<p><img src="images/93836330-271a-47a2-a10f-353aedf85bfc.png" alt="Plot with rankings and four metrics of the challenge for each country."></p>
<p>The receiver operator characteristic curves (ROCs) for glaucoma screening performance and robustness are shown below. The sensitivity and specificity of the graders are plotted as well, showing that the screening performance of the teams was on par with the one of human graders.</p>
<p><img src="images/fa1940be-1aeb-456b-a148-207b51c1dbeb.png" alt="ROC plots of the two challenge tasks."></p>
<p>The winning teams received a cash prize and AWS compute credits:</p>
<p><img src="images/19c0292b-b0de-476d-873c-ef0ae5cec722.jpeg" alt="The winning teams of the AIROGS challenge receiving their prizes."></p>
<h3>What worked?</h3>
<p>Based on the <a href="https://airogs.grand-challenge.org/evaluation/final-test-phase/leaderboard/">method descriptions</a> provided by the team participants, it stood out that a few methodological choices were made predominantly by top performing teams.</p>
<h4>Glaucoma screening performance</h4>
<p>All teams in the top 3 used a vision transformer for referable glaucoma classification, while apart from them, only one other participant did. Furthermore, all prize winners manually labeled the optic disc to train an optic disc segmentation or detection algorithm and used it to crop input images as a pre-processing step. That was only done by two other participants.</p>
<h4>Robustness</h4>
<p>The three winners also all in some way used the confidence of this optic disc detection or segmentation algorithm for ungradability detection.</p>
<p>The <a href="https://rumc-gcorg-p-public.s3.amazonaws.com/evaluation-supplementary/644/0df71d89-ffeb-4f26-9820-3e4546afd359/AIROGS_Challenge_Paper_9xMW9vd.pdf">team that scored the highest AUC<sub>U</sub></a> manually labeled the most optic discs, and their ungradability detection approach also included a separate classifier. This classifier was trained to separate gradable images from images that were "more on the side of ungradability" and the training samples were picked manually by selecting “images that seemed most affected by blurring or low-quality depiction of the optic disc”. <a href="https://rumc-gcorg-p-public.s3.amazonaws.com/evaluation-supplementary/644/cb8c2395-c963-4564-a36b-0cbd099f5854/Airogs_fine_my.pdf">The second best AUC<sub>U</sub></a> approach also included the confidence of a vessel segmentation approach.</p>
<p>The three best performing teams in terms of AUC<sub>U</sub> that did not require manual annotations, were ranked <a href="https://rumc-gcorg-p-public.s3.amazonaws.com/evaluation-supplementary/644/d259c884-5eda-4bb9-b9fb-73a29e9eaf41/airogs.pdf">4th</a>, <a href="https://rumc-gcorg-p-public.s3.amazonaws.com/evaluation-supplementary/644/3e9f9bbb-9d22-430f-9448-6a4e21a29f2f/Airogs_Combination_Algorithm.pdf">5th</a> and <a href="https://rumc-gcorg-p-public.s3.amazonaws.com/evaluation-supplementary/644/d79d3e55-505a-416a-b389-0a51170b1271/AIROGS.pdf">6th</a> in terms of AUC<sub>U</sub>. These 4th and 6th ranked teams both used test-time augmentation for ungradability detection. The robustness approach from the 5th ranked team used the reconstruction error of an autoencoder trained on gradable data, weighted by a factor based on their RG classification model that is “highest [...] when the probability is 0.5, which [means the model is] unable to predict, and is lowest when the probability is certain, which is either 0 or 1.”</p>
<h3>Type 2 challenges</h3>
<p>AIROGS was one of the first so-called <a href="https://grand-challenge.org/documentation/type-ii-challenge-setup/">“Type 2” challenges</a> on grand-challenge.org. In such a challenge, participants submit an algorithm, rather than a file with their predictions on the test set, as is done in a <a href="https://grand-challenge.org/documentation/type-i-challenge-setup/">Type 1 challenge</a>. These algorithms are submitted in the form of a <a href="https://www.docker.com/">Docker</a> container. The grand-challenge.org platform runs these algorithms on the private test set, which has a number of advantages. Firstly, this makes manual manipulation of the test set impossible, reducing the possibility of cheating. Secondly, it greatly improves reproducibility.</p>
<p>For Type 1 challenge type, reproducibility was not always guaranteed. Top performing solutions were often not available, or available in a repository that was hard or impossible to get working. For our challenge, participants needed to upload an algorithm that can operate fully automatically and autonomously (internet access is not available when running these algorithms). Therefore, it is guaranteed that each submitted solution has working code behind it. With the single click of a button, AIROGS participants can now open up their solutions for others to use since they already have a working algorithm on grand-challenge.org. A few examples of public AIROGS solutions are already provided by <a href="https://grand-challenge.org/algorithms/airogs-classifier/">Densen Puthussery et al.</a>, <a href="https://grand-challenge.org/algorithms/airogs_ur/">Jónathan Heras et al.</a> and <a href="https://grand-challenge.org/algorithms/base_airgos/">Abdul Qayyum et al.</a> We can now upload new images to these algorithms for further testing by requesting access and clicking the <img src="https://rumc-gcorg-p-public.s3.amazonaws.com/i/2022/04/29/e822874a-0f6e-4b0c-a5d9-9da05a23f6da.png" alt="Try-out algorithm" /> button or <a href="https://grand-challenge.org/documentation/gc-api-algorithms-tutorial/">the API</a>. Or if you don’t have any test data at hand, we can view these algorithms’ results on some publicly available images that were already uploaded before by going to the algorithms’ <img src="https://rumc-gcorg-p-public.s3.amazonaws.com/i/2022/04/29/8dec2aec-182b-44c5-aa9b-edbb009aa904.png" alt="Results" /> pages.</p>
<h3>Final words</h3>
<p>We just reopened the challenge for new submissions. So, although the prizes have already been rewarded, you can still join the challenge and submit your own solutions <a href="https://airogs.grand-challenge.org/">here</a>.</p>
<p>You can also already try out some solutions from participants who made their code and Grand Challenge algorithms publicly available <a href="https://airogs.grand-challenge.org/Home/#final-leaderboard">here</a>.</p>
<p>If you would like to see more results, hear more details about the challenge or learn more about the methods used by the participants, <a href="https://airogs.grand-challenge.org/evaluation/final-test-phase/leaderboard/">read their method descriptions</a> or watch the AIROGS event at ISBI 2022 here:</p>
<!-- <video class="w-100" controls="" src="files/AIROGS.mp4"></video> -->
<div class="video-container">
<iframe width="100%" src="https://www.youtube.com/embed/uu-cOW-5Ijs" title="YouTube video player" frameborder="0" allow="accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture" allowfullscreen></iframe>
</div>
<p><br>
In conclusion, we presented a challenge based on real-world data for glaucoma screening by CFP. The results are promising, as the teams’ performances were high and many teams exceeded our target of an 80% sensitivity at 95% specificity. We hope the results of this challenge help toward implementing robust AI for glaucoma screening in clinical practice.</p>
<h3>Publication</h3>
<p>Please use this when referring to the post or publication:</p>
<div class="publication-card">
<p><span class='highlight-me'>C. de Vente</span>, K.A. Vermeer, N. Jaccard, H. Wang, H. Sun, F. Khader, D. Truhn, T. Aimyshev, Y. Zhanibekuly, T. Le, A. Galdran, M.Á. González Ballester, G. Carneiro, D.R. G, H.P. S, D. Puthussery, H. Liu, Z. Yang, S. Kondo, S. Kasai, E. Wang, A. Durvasula, J. Heras, M.Á. Zapata, T. Araújo, G. Aresta, H. Bogunović, M. Arikan, Y.C. Lee, H.B. Cho, Y.H. Choi, A. Qayyum, I. Razzak, B. van Ginneken, H.G. Lemij and C.I. Sánchez. "AIROGS: Artificial Intelligence for RObust Glaucoma Screening Challenge", <i>IEEE Transactions on Medical Imaging</i>, 2023;43(1):542-557.</p><div class="publication-button-group"><a data-ix="goupbox" id="publication-modal-vent23a-button" class="knop footerknop movewithmouse w-button publication-button">Cite</a> <a data-ix="goupbox" target="_blank" class="knop footerknop movewithmouse w-button publication-button" href="https://doi.org/10.1109/TMI.2023.3313786">DOI</a> <a data-ix="goupbox" target="_blank" class="knop footerknop movewithmouse w-button publication-button" href="http://www.ncbi.nlm.nih.gov/pubmed/37713220/">PMID</a></div>
</div>
</div>
<div data-ix="flowinslow" class="column-10 w-clearfix w-col w-col-2"></div>
</div>
</div>
</div>
<div class="section-4" style="background-color: #9497B2;">
<div class="row-2 w-row">
<h3 class="heading-5 zwartehead" style="color: white;">Other posts</h3>
</div>
<div class="row-2 w-row">
<div class="column-5 w-col w-col-9 w-col-stack">
<div class="w-dyn-list">
<div class="collection-list colophome w-dyn-items w-row other-projects-list">
<div data-ix="flowinslow" class="collection-item lazyload w-dyn-item w-col w-col-4">
<div data-ix="goupbox" class="templatethumb movewithmouse opvoorkant"
style="background-image: linear-gradient(0deg, rgba(77, 131, 143, 0.8), rgba(77, 131, 143, 0.2)), url('images/visualizing_ai_thumb.png'); background-size: cover; background-position: top;">
<a class="templatethumblink w-inline-block" href="project-uva-interview.html">
<div class="templatefrontwrap">
<div class="templatefront">
<img src="images/magnifying-glass-icon.png" height="20">
</div>
</div>
<img class="thumbimgbg">
</a>
</div>
<h3 class="heading-9"><a href="project-uva-interview.html">An interview on robust AI for medical imaging with the University of Amsterdam</a></h3>
<h4 class="heading-17">16 June 2023</h4>
</div>
<div data-ix="flowinslow" class="collection-item lazyload w-dyn-item w-col w-col-4">
<div data-ix="goupbox" class="templatethumb movewithmouse opvoorkant"
style="background-image: linear-gradient(0deg, rgba(53, 182, 56, 0.4), rgba(53, 83, 182, 0.3)), url('images/oct-superres/oct_jungle_small_thumb.png'); background-size: cover; background-position: left;">
<a class="templatethumblink w-inline-block" href="project-oct-superres.html">
<div class="templatefrontwrap">
<div class="templatefront">
<img src="images/magnifying-glass-icon.png" height="20">
</div>
</div>
<img class="thumbimgbg">
</a>
</div>
<h3 class="heading-9"><a href="project-oct-superres.html">3D Diffusion Models for Standardized High-Quality OCTs</a></h3>
<h4 class="heading-17">26 March 2023</h4>
</div>
<div data-ix="flowinslow" class="collection-item lazyload w-dyn-item w-col w-col-4">
<div data-ix="goupbox" class="templatethumb movewithmouse opvoorkant"
style="background-image: linear-gradient(0deg, rgba(52, 152, 219, 0.8), rgba(52, 152, 219, 0.2)), url('images/covid_thumb.png'); background-size: cover; background-position: left;">
<a class="templatethumblink w-inline-block" href="project-covid.html">
<div class="templatefrontwrap">
<div class="templatefront">
<img src="images/magnifying-glass-icon.png" height="20">
</div>
</div>
<img class="thumbimgbg">
</a>
</div>
<h3 class="heading-9"><a href="project-covid.html">Automated COVID-19 Grading in CT Scans</a></h3>
<h4 class="heading-17">8 October 2021</h4>
</div>
</div>
</div>
</div>
<div class="column-6 w-col w-col-3 w-col-stack">
<a href="projects.html" data-ix="flowinslow" class="link-block voorhome w-inline-block">
<div data-ix="showarrowgoupbox" class="morerect meerophome movewithmouse meernietophome">
<h1 class="infronthead moreinfront bekijkmeer">More projects</h1><img src="images/arrow-point-to-right.png" class="arrowright"></div>
</a>
</div>
</div>
</div>
<div id="publication-modal-vent23a" class="modal">
<div class="modal-content">
<span class="close" id="close-publication-modal-vent23a">×</span>
<p>C. de Vente, K.A. Vermeer, N. Jaccard, H. Wang, H. Sun, F. Khader, D. Truhn, T. Aimyshev, Y. Zhanibekuly, T. Le, A. Galdran, M.Á. González Ballester, G. Carneiro, D.R. G, H.P. S, D. Puthussery, H. Liu, Z. Yang, S. Kondo, S. Kasai, E. Wang, A. Durvasula, J. Heras, M.Á. Zapata, T. Araújo, G. Aresta, H. Bogunović, M. Arikan, Y.C. Lee, H.B. Cho, Y.H. Choi, A. Qayyum, I. Razzak, B. van Ginneken, H.G. Lemij and C.I. Sánchez. "AIROGS: Artificial Intelligence for RObust Glaucoma Screening Challenge", <i>IEEE Transactions on Medical Imaging</i>, 2023;43(1):542-557.</p>
<pre id="publication-modal-vent23a-pre">@article{Vent23a,
title={AIROGS: Artificial Intelligence for RObust Glaucoma Screening Challenge},
author={de Vente, Coen and Vermeer, Koenraad A. and Jaccard, Nicolas and Wang, He and Sun, Hongyi and Khader, Firas and Truhn, Daniel and Aimyshev, Temirgali and Zhanibekuly, Yerkebulan and Le, Tien-Dung and Galdran, Adrian and González Ballester, Miguel Ángel and Carneiro, Gustavo and G, Devika R and S, Hrishikesh P and Puthussery, Densen and Liu, Hong and Yang, Zekang and Kondo, Satoshi and Kasai, Satoshi and Wang, Edward and Durvasula, Ashritha and Heras, Jónathan and Zapata, Miguel Ángel and Araújo, Teresa and Aresta, Guilherme and Bogunović, Hrvoje and Arikan, Mustafa and Lee, Yeong Chan and Cho, Hyun Bin and Choi, Yoon Ho and Qayyum, Abdul and Razzak, Imran and van Ginneken, Bram and Lemij, Hans G. and Sánchez, Clara I.},
journal={IEEE Transactions on Medical Imaging},
year={2023},
scholar_id={12607038868340347612,10290591934597328501,16965053761187170060,12629941101464488854},
publisher={IEEE},
volume={43},
number={1},
pages={542-557},
doi={10.1109/TMI.2023.3313786},
pmid={37713220}
}</pre>
<div class="publication-button-group">
<a data-ix="goupbox" onclick="copy_text('publication-modal-vent23a')"
class="knop footerknop movewithmouse w-button publication-button publication-button-black">
<img src="images/copy.png" class="copy-icon"> Copy
</a>
</div>
</div>
</div>
<div class="div-block-17">
<div data-ix="flowinslow" class="div-block-34">
<h1 class="heading-7">Want to get in contact?</h1><a data-ix="goupbox" href="contact.html" class="knop footerknop movewithmouse w-button">Drop me a line</a></div>
</div>
<div class="section-5">
<div class="row-3 w-row">
<div class="column colrightarr w-col w-col-4">
<div class="div-block-9">
<div class="text-block-8">sitemap</div>
</div>
<div class="div-block-9"><a href="index.html" class="link">home</a></div>
<div class="div-block-9"><a href="projects.html" class="link">posts</a></div>
<div class="div-block-9"><a href="publications.html" class="link">publications</a></div>
<div class="div-block-9"><a href="about.html" class="link">about</a></div>
<div class="div-block-9"><a href="contact.html" class="link">contact</a></div>
</div>
<div class="colrightarr w-col w-col-4">
<div class="div-block-9">
<div class="text-block-8">contact</div>
</div>
<div class="div-block-9"><a href="mailto:hello@coendevente.com" class="link">hello@coendevente.com</a></div>
<div class="div-block-9"><a href="contact.html" class="link">contact form</a></div>
<div class="div-block-9"><a href="https://calendar.google.com/calendar/appointments/AcZssZ1dSioNVM88OG4Ta6X_cBKh6_3TFOWJ9hHaHHc=" class="link">schedule a meeting with me</a></div>
</div>
<div class="colrightarr w-col w-col-4 norightbar">
<div class="div-block-9">
<div class="text-block-8">links</div>
</div>
<!-- <div class="div-block-9 langetekst"> -->
<div class="div-block-9">
<a target="_blank" href="https://scholar.google.com/citations?user=AqL8A60AAAAJ" class="link">
<img class="link-icon" src="images/links/scholar.svg" /> Google Scholar
</a>
</div>
<div class="div-block-9">
<a target="_blank" href="https://www.linkedin.com/in/coendevente/" class="link">
<img class="link-icon" src="images/links/linkedin.png" /> LinkedIn
</a>
</div>
<div class="div-block-9">
<a target="_blank" href="https://qurai.amsterdam/researcher/coen_de_vente/" class="link">
<img class="link-icon" src="images/links/qurai.png" /> QurAI
</a>
</div>
<div class="div-block-9">
<a target="_blank" href="https://www.diagnijmegen.nl/people/coen-de-vente/" class="link">
<img class="link-icon" src="images/links/radboudumc.png" /> DIAG Nijmegen
</a>
</div>
<div class="div-block-9">
<a target="_blank" href="https://github.com/coendevente" class="link">
<img class="link-icon" src="images/links/github.svg" /> GitHub
</a>
</div>
<!-- <div class="div-block-9">
<a target="_blank" href="https://github.com/coendevente/build-coendevente.com" class="link">
<img class="link-icon" src="images/links/github.svg" /> coendevente.com
</a>
</div> -->
<!-- </div> -->
</div>
</div>
</div>
<script src="https://code.jquery.com/jquery-3.3.1.min.js" type="text/javascript" integrity="sha256-FgpCb/KJQlLNfOu91ta32o/NMZxltwRo8QtmkMRdAu8=" crossorigin="anonymous"></script>
<script src="js/webflow.js" type="text/javascript"></script>
<!--[if lte IE 9]><script src="https://cdnjs.cloudflare.com/ajax/libs/placeholders/3.0.2/placeholders.min.js"></script><![endif]-->
<style>
.w-webflow-badge {
display: none !important;
}
.w-nav,
.w--nav-menu-open,
.w-nav-link {
transition: all .5s;
}
.w-nav.colored,
.w--nav-menu-open.colored {
background: #FFF;
box-shadow: 0 -10px 30px #333;
}
.w-nav-link.colored {
color: #333 !important;
}
.w-icon-nav-menu.colored {
color: #333;
}
.thumbimgbg {
position: relative;
top: 0;
z-index: 0;
}
.templatefront {
transition: opacity .3s;
}
</style>
<script>
// Activate lazy loading for elements tagged for lazyload.js (loaded in <head>).
lazyload();

// Switch the navbar to its "colored" (opaque, dark-text) state whenever the
// page is scrolled past the threshold, the viewport is narrow, or the current
// page forces dark text. All former implicit globals are now proper locals.
function refresh() {
  var thresh = 0;
  var fromTop = $(window).scrollTop();
  // Pages can force the colored navbar via a ".wittebalk" flag element whose
  // content is "1", or by styling the title (".naastmockup") with dark text.
  var wittebalk = $(".wittebalk").html() === "1";
  var alwaysDarkText = $(".naastmockup").css("color") === "rgb(51, 51, 51)";
  if (thresh < fromTop || $(window).width() <= 991 || wittebalk || alwaysDarkText) {
    $(".w-nav").addClass("colored");
    $(".w-nav-link").addClass("colored");
    $(".blacklogo").show();
    $(".whitelogo").hide();
    $(".w-icon-nav-menu").addClass("colored");
    $(".w--nav-menu-open").addClass("colored");
  } else {
    $(".w-nav").removeClass("colored");
    $(".w-nav-link").removeClass("colored");
    $(".blacklogo").hide();
    $(".whitelogo").show();
    $(".w-icon-nav-menu").removeClass("colored");
    $(".w--nav-menu-open").removeClass("colored");
  }
}

// Re-evaluate the navbar state on every interaction that can change it.
$(document).click(refresh);
$(document).ready(refresh);
$(window).scroll(refresh);
$(window).resize(refresh);

// 3D "tilt towards the cursor" effect for elements with .movewithmouse.
$(".movewithmouse").mousemove(function(e) {
  var cursorX = e.clientX;
  var cursorY = e.clientY;
  var divW = $(this).outerWidth();
  var divH = $(this).outerHeight();
  // Maximum rotation/translation scales with the element's width.
  var maxXdeg = divW / 50;
  var maxYdeg = maxXdeg;
  var maxZdeg = -(divW / 400);
  var maxXpx = divW / 50;
  var maxYpx = divW / 50;
  var divCenterX = $(this).offset().left - $(window).scrollLeft() + (divW / 2);
  var divCenterY = $(this).offset().top - $(window).scrollTop() + (divH / 2);
  var xDist = cursorX - divCenterX;
  var yDist = cursorY - divCenterY;
  var xDeg = -maxXdeg * (xDist / (divW / 2));
  var yDeg = maxYdeg * (yDist / (divH / 2));
  var zDeg = maxZdeg * (xDist * yDist / (divW * divH / 4));
  var xPx = maxXpx * (xDist / (divW / 2));
  var yPx = maxYpx * (yDist / (divH / 2));
  var cssText = "scale(1.04)";
  cssText += " rotateY(" + xDeg + "deg) rotateX(" + yDeg + "deg) rotateZ(" + zDeg + "deg)";
  cssText += " translateX(" + xPx + "px) translateY(" + yPx + "px)";
  $(this).css("transform", cssText);
});

// Project thumbnails: on hover, scroll the background image upwards at a
// constant speed (regardless of image height) and reveal the front overlay.
$(".templatethumb").hover(function() {
  var thumbW = $(this).width();
  var thumbH = $(this).height();
  var img = $(this).find(".thumbimgbg");
  var imgH = img.height();
  var v = 45; // scroll speed in px per second
  var t = imgH / v;
  $(".thumbimgbg").css("transition", "top " + t + "s");
  img.css("top", -imgH + thumbH);
  $(this).find(".templatefront").width(thumbW);
  $(this).find(".templatefront").height(thumbH);
  $(this).find(".templatefront").css("opacity", 1);
}, function() {
  // Look the image up relative to this thumb: the original relied on an
  // implicitly global `img` leaked from the hover-in handler, which could
  // reset the wrong thumbnail when hover events interleave.
  $(this).find(".thumbimgbg").css("top", 0);
  $(this).find(".templatefront").css("opacity", 0);
});
</script>
<!--End of Tawk.to Script -->
<style>
.hideword {
/* display: none; */
opacity: 0;
}
.switchwordswrap {
text-align: left;
}
.switchwords,
.holdplace {
display: block;
float: left;
height: 70px;
}
@media(max-width: 767px) {
.switchwords,
.holdplace {
height: 55px;
}
}
@media(max-width: 479px) {
.switchwords,
.holdplace {
height: 45px;
}
}
.wordwrapper {
position: absolute;
height: 0;
width: 0;
}
.nofloatleft {
/* float: none; */
}
.floatleftwrap {
/* overflow: hidden; */
}
</style>
<script type="text/javascript" src="js/jquery.min.js"></script>
<script type="text/javascript" src="js/plax.js"></script>
<script>
// Enable the plax parallax plugin and register each layer that should move.
$.plax.enable();
[".medical-image", ".ground-bg", ".plax"].forEach(function(selector) {
  $(selector).plaxify();
});
</script>
<script>
// Wire up every citation modal: the element with id "<modal-id>-button" opens
// it and the element with id "close-<modal-id>" (the ×) closes it.
Array.from(document.getElementsByClassName("modal")).forEach(
  function(modal) {
    var btn = document.getElementById(modal.id + "-button");
    var span = document.getElementById("close-" + modal.id);
    // Guard against a missing trigger/close element so one malformed modal
    // cannot throw and abort wiring of the remaining modals (the original
    // dereferenced these unconditionally and left debug console.log calls in).
    if (btn) {
      btn.onclick = function() {
        modal.style.display = "block";
      };
    }
    if (span) {
      span.onclick = function() {
        modal.style.display = "none";
      };
    }
  }
);
// Clicking the dimmed backdrop (the modal element itself) also closes it.
window.onclick = function(event) {
  Array.from(document.getElementsByClassName("modal")).forEach(
    function(modal) {
      if (event.target == modal) {
        modal.style.display = "none";
      }
    }
  );
};
</script>
</body>
</html>