2023
Aswath, Anusha; Alsahaf, Ahmad; Giepmans, Ben N. G.; Azzopardi, George
Segmentation in large-scale cellular electron microscopy with deep learning: A literature survey Journal Article
Links | BibTeX | Altmetric | Tags: deep learning, electron microscopy, medical image analysis, segmentation
@article{Aswath2023,
  title     = {Segmentation in large-scale cellular electron microscopy with deep learning: A literature survey},
  author    = {Aswath, Anusha and Alsahaf, Ahmad and Giepmans, Ben N. G. and Azzopardi, George},
  doi       = {10.1016/j.media.2023.102920},
  year      = {2023},
  date      = {2023-08-06},
  urldate   = {2023-08-06},
  journal   = {Medical Image Analysis},
  pages     = {102920},
  keywords  = {deep learning, electron microscopy, medical image analysis, segmentation},
  pubstate  = {published},
  tppubtype = {article}
}
2021
Shi, Chenyu; Meijer, Joost M; Azzopardi, George; Diercks, Gilles FH; Guo, Jiapan; Petkov, Nicolai
Use of Convolutional Neural Networks for the Detection of u-Serrated Patterns in Direct Immunofluorescence Images to Facilitate the Diagnosis of Epidermolysis Bullosa Acquisita Journal Article
Abstract | Links | BibTeX | Altmetric | Tags: convnets, deep learning, medical image analysis
@article{shi2021use,
  title     = {Use of Convolutional Neural Networks for the Detection of u-Serrated Patterns in Direct Immunofluorescence Images to Facilitate the Diagnosis of Epidermolysis Bullosa Acquisita},
  author    = {Shi, Chenyu and Meijer, Joost M and Azzopardi, George and Diercks, Gilles FH and Guo, Jiapan and Petkov, Nicolai},
  doi       = {10.1016/j.ajpath.2021.05.024},
  year      = {2021},
  date      = {2021-01-01},
  urldate   = {2021-01-01},
  journal   = {The American Journal of Pathology},
  volume    = {191},
  number    = {9},
  pages     = {1520--1525},
  publisher = {Elsevier},
  abstract  = {The u-serrated immunodeposition pattern in direct immunofluorescence (DIF) microscopy is a recognizable feature and confirmative for the diagnosis of epidermolysis bullosa acquisita (EBA). Due to unfamiliarity with serrated patterns, serration pattern recognition is still of limited use in routine DIF microscopy. The objective of this study was to investigate the feasibility of using convolutional neural networks (CNNs) for the recognition of u-serrated patterns that can assist in the diagnosis of EBA. The nine most commonly used CNNs were trained and validated by using 220,800 manually delineated DIF image patches from 106 images of 46 different patients. The data set was split into 10 subsets: nine training subsets from 42 patients to train CNNs and the last subset from the remaining four patients for a validation data set of diagnostic accuracy. This process was repeated 10 times with a different subset used for validation. The best-performing CNN achieved a specificity of 89.3% and a corresponding sensitivity of 89.3% in the classification of u-serrated DIF image patches, an expert level of diagnostic accuracy. Experiments and results show the effectiveness of CNN approaches for u-serrated pattern recognition with a high accuracy. The proposed approach can assist clinicians and pathologists in recognition of u-serrated patterns in DIF images and facilitate the diagnosis of EBA.},
  keywords  = {convnets, deep learning, medical image analysis},
  pubstate  = {published},
  tppubtype = {article}
}
2020
Ramachandran, Sivakumar; Strisciuglio, Nicola; Vinekar, Anand; John, Renu; Azzopardi, George
U-COSFIRE filters for vessel tortuosity quantification with application to automated diagnosis of retinopathy of prematurity Journal Article
Abstract | Links | BibTeX | Altmetric | Tags: brain-inspired, medical image analysis, trainable filters
@article{ramachandran2020u,
  title     = {{U-COSFIRE} filters for vessel tortuosity quantification with application to automated diagnosis of retinopathy of prematurity},
  author    = {Ramachandran, Sivakumar and Strisciuglio, Nicola and Vinekar, Anand and John, Renu and Azzopardi, George},
  doi       = {10.1007/s00521-019-04697-6},
  year      = {2020},
  date      = {2020-01-01},
  urldate   = {2020-01-01},
  journal   = {Neural Computing and Applications},
  volume    = {32},
  number    = {16},
  pages     = {12453--12468},
  publisher = {Springer London},
  abstract  = {Retinopathy of prematurity (ROP) is a sight threatening disorder that primarily affects preterm infants. It is the major reason for lifelong vision impairment and childhood blindness. Digital fundus images of preterm infants obtained from a Retcam Ophthalmic Imaging Device are typically used for ROP screening. ROP is often accompanied by Plus disease that is characterized by high levels of arteriolar tortuosity and venous dilation. The recent diagnostic procedures view the prevalence of Plus disease as a factor of prognostic significance in determining its stage, progress and severity. Our aim is to develop a diagnostic method, which can distinguish images of retinas with ROP from healthy ones and that can be interpreted by medical experts. We investigate the quantification of retinal blood vessel tortuosity via a novel U-COSFIRE (Combination Of Shifted Filter Responses) filter and propose a computer-aided diagnosis tool for automated ROP detection. The proposed methodology involves segmentation of retinal blood vessels using a set of B-COSFIRE filters with different scales followed by the detection of tortuous vessels in the obtained vessel map by means of U-COSFIRE filters. We also compare our proposed technique with an angle-based diagnostic method that utilizes the magnitude and orientation responses of the multi-scale B-COSFIRE filters. We carried out experiments on a new data set of 289 infant retinal images (89 with ROP and 200 healthy) that we collected from the programme in India called KIDROP (Karnataka Internet Assisted Diagnosis of Retinopathy of Prematurity). We used 10 images (5 with ROP and 5 healthy) for learning the parameters of our methodology and the remaining 279 images (84 with ROP and 195 healthy) for performance evaluation. We achieved sensitivity and specificity equal to 0.98 and 0.97, respectively, computed on the 279 test images. The obtained results and its explainable character demonstrate the effectiveness of the proposed approach to assist medical experts.},
  keywords  = {brain-inspired, medical image analysis, trainable filters},
  pubstate  = {published},
  tppubtype = {article}
}
2019
Guo, Jiapan; Azzopardi, George; Shi, Chenyu; Jansonius, Nomdo M; Petkov, Nicolai
Automatic Determination of Vertical Cup-to-Disc Ratio in Retinal Fundus Images for Glaucoma Screening Journal Article
Abstract | Links | BibTeX | Altmetric | Tags: brain-inspired, medical image analysis, trainable filters
@article{guo2019automatic,
  title     = {Automatic Determination of Vertical Cup-to-Disc Ratio in Retinal Fundus Images for Glaucoma Screening},
  author    = {Guo, Jiapan and Azzopardi, George and Shi, Chenyu and Jansonius, Nomdo M and Petkov, Nicolai},
  doi       = {10.1109/ACCESS.2018.2890544},
  year      = {2019},
  date      = {2019-01-01},
  urldate   = {2019-01-01},
  journal   = {IEEE Access},
  volume    = {7},
  pages     = {8527--8541},
  publisher = {IEEE},
  abstract  = {Glaucoma is a chronic progressive optic neuropathy that causes visual impairment or blindness if left untreated. It is crucial to diagnose it at an early stage in order to enable treatment. Fundus photography is a viable option for population-based screening. A fundus photograph enables the observation of the excavation of the optic disk\textemdash the hallmark of glaucoma. The excavation is quantified as a vertical cup-to-disk ratio (VCDR). The manual assessment of retinal fundus images is, however, time-consuming and costly. Thus, an automated system is necessary to assist human observers. We propose a computer-aided diagnosis system, which consists of the localization of the optic disk, the determination of the height of the optic disk and the cup, and the computation of the VCDR. We evaluated the performance of our approach on eight publicly available datasets, which have, in total, 1712 retinal fundus images. We compared the obtained VCDR values with those provided by an experienced ophthalmologist and achieved a weighted VCDR mean difference of 0.11. The system provides a reliable estimation of the height of the optic disk and the cup in terms of the relative height error (RHE = 0.08 and 0.09, respectively). The Bland\textendash Altman analysis showed that the system achieves a good agreement with the manual annotations, especially for large VCDRs which indicate pathology.},
  keywords  = {brain-inspired, medical image analysis, trainable filters},
  pubstate  = {published},
  tppubtype = {article}
}
Shi, Chenyu; Meijer, Joost M; Guo, Jiapan; Azzopardi, George; Diercks, Gilles FH; Schmidt, Enno; Zillikens, Detlef; Jonkman, Marcel F; Petkov, Nicolai
Detection of u-serrated patterns in direct immunofluorescence images of autoimmune bullous diseases by inhibition-augmented COSFIRE filters Journal Article
Abstract | Links | BibTeX | Altmetric | Tags: brain-inspired, medical image analysis, noise suppression, trainable filters
@article{shi2019detection,
  title     = {Detection of u-serrated patterns in direct immunofluorescence images of autoimmune bullous diseases by inhibition-augmented {COSFIRE} filters},
  author    = {Shi, Chenyu and Meijer, Joost M and Guo, Jiapan and Azzopardi, George and Diercks, Gilles FH and Schmidt, Enno and Zillikens, Detlef and Jonkman, Marcel F and Petkov, Nicolai},
  doi       = {10.1016/j.ijmedinf.2018.11.007},
  year      = {2019},
  date      = {2019-01-01},
  urldate   = {2019-01-01},
  journal   = {International Journal of Medical Informatics},
  volume    = {122},
  pages     = {27--36},
  publisher = {Elsevier},
  abstract  = {Direct immunofluorescence (DIF) microscopy of a skin biopsy is used by physicians and pathologists to diagnose autoimmune bullous dermatoses (AIBD). This technique is the reference standard for diagnosis of AIBD, which is used worldwide in medical laboratories. For diagnosis of subepidermal AIBD (sAIBD), two different types of serrated pattern of immunodepositions can be recognized from DIF images, namely n- and u-serrated patterns. The n-serrated pattern is typically found in the most common sAIBD bullous pemphigoid. Presence of the u-serrated pattern indicates the sAIBD subtype epidermolysis bullosa acquisita (EBA), which has a different prognosis and requires a different treatment. The manual identification of these serrated patterns is learnable but challenging. We propose an automatic technique that is able to localize u-serrated patterns for automated computer-assisted diagnosis of EBA. The distinctive feature of u-serrated patterns as compared to n-serrated patterns is the presence of ridge-endings. We introduce a novel ridge-ending detector which uses inhibition-augmented trainable COSFIRE filters. Then, we apply a hierarchical clustering approach to detect the suspicious u-serrated patterns from the detected ridge-endings. For each detected u-serrated pattern we provide a score that indicates the reliability of its detection. In order to evaluate the proposed approach, we created a data set with 180 DIF images for serration pattern analysis. This data set consists of seven subsets which were obtained from various biopsy samples under different conditions. We achieve an average recognition rate of 82.2% of the u-serrated pattern on these 180 DIF images, which is comparable to the recognition rate achieved by experienced medical doctors and pathologists.},
  keywords  = {brain-inspired, medical image analysis, noise suppression, trainable filters},
  pubstate  = {published},
  tppubtype = {article}
}
2016
Strisciuglio, Nicola; Azzopardi, George; Vento, Mario; Petkov, Nicolai
Supervised vessel delineation in retinal fundus images with the automatic selection of B-COSFIRE filters Journal Article
Abstract | Links | BibTeX | Altmetric | Tags: brain-inspired, medical image analysis, segmentation, trainable filters
@article{strisciuglio2016supervised,
  title     = {Supervised vessel delineation in retinal fundus images with the automatic selection of {B-COSFIRE} filters},
  author    = {Strisciuglio, Nicola and Azzopardi, George and Vento, Mario and Petkov, Nicolai},
  doi       = {10.1007/s00138-016-0781-7},
  year      = {2016},
  date      = {2016-01-01},
  urldate   = {2016-01-01},
  journal   = {Machine Vision and Applications},
  publisher = {Springer Berlin Heidelberg},
  abstract  = {The inspection of retinal fundus images allows medical doctors to diagnose various pathologies. Computer-aided diagnosis systems can be used to assist in this process. As a first step, such systems delineate the vessel tree from the background. We propose a method for the delineation of blood vessels in retinal images that is effective for vessels of different thickness. In the proposed method, we employ a set of B-COSFIRE filters selective for vessels and vessel-endings. Such a set is determined in an automatic selection process and can adapt to different applications. We compare the performance of different selection methods based upon machine learning and information theory. The results that we achieve by performing experiments on two public benchmark data sets, namely DRIVE and STARE, demonstrate the effectiveness of the proposed approach.},
  keywords  = {brain-inspired, medical image analysis, segmentation, trainable filters},
  pubstate  = {published},
  tppubtype = {article}
}
2015
Azzopardi, George; Strisciuglio, Nicola; Vento, Mario; Petkov, Nicolai
Trainable COSFIRE filters for vessel delineation with application to retinal images Journal Article
Abstract | Links | BibTeX | Altmetric | Tags: brain-inspired, medical image analysis, segmentation, trainable filters
@article{azzopardi2015trainable,
  title     = {Trainable {COSFIRE} filters for vessel delineation with application to retinal images},
  author    = {Azzopardi, George and Strisciuglio, Nicola and Vento, Mario and Petkov, Nicolai},
  doi       = {10.1016/j.media.2014.08.002},
  year      = {2015},
  date      = {2015-01-01},
  urldate   = {2015-01-01},
  journal   = {Medical Image Analysis},
  volume    = {19},
  number    = {1},
  pages     = {46--57},
  publisher = {Elsevier},
  abstract  = {Retinal imaging provides a non-invasive opportunity for the diagnosis of several medical pathologies. The automatic segmentation of the vessel tree is an important pre-processing step which facilitates subsequent automatic processes that contribute to such diagnosis.
We introduce a novel method for the automatic segmentation of vessel trees in retinal fundus images. We propose a filter that selectively responds to vessels and that we call B-COSFIRE with B standing for bar which is an abstraction for a vessel. It is based on the existing COSFIRE (Combination Of Shifted Filter Responses) approach. A B-COSFIRE filter achieves orientation selectivity by computing the weighted geometric mean of the output of a pool of Difference-of-Gaussians filters, whose supports are aligned in a collinear manner. It achieves rotation invariance efficiently by simple shifting operations. The proposed filter is versatile as its selectivity is determined from any given vessel-like prototype pattern in an automatic configuration process. We configure two B-COSFIRE filters, namely symmetric and asymmetric, that are selective for bars and bar-endings, respectively. We achieve vessel segmentation by summing up the responses of the two rotation-invariant B-COSFIRE filters followed by thresholding.
The results that we achieve on three publicly available data sets (DRIVE: Se = 0.7655, Sp = 0.9704; STARE: Se = 0.7716, Sp = 0.9701; CHASE\_DB1: Se = 0.7585, Sp = 0.9587) are higher than many of the state-of-the-art methods. The proposed segmentation approach is also very efficient with a time complexity that is significantly lower than existing methods.},
  keywords  = {brain-inspired, medical image analysis, segmentation, trainable filters},
  pubstate  = {published},
  tppubtype = {article}
}
We introduce a novel method for the automatic segmentation of vessel trees in retinal fundus images. We propose a filter that selectively responds to vessels and that we call B-COSFIRE with B standing for bar which is an abstraction for a vessel. It is based on the existing COSFIRE (Combination Of Shifted Filter Responses) approach. A B-COSFIRE filter achieves orientation selectivity by computing the weighted geometric mean of the output of a pool of Difference-of-Gaussians filters, whose supports are aligned in a collinear manner. It achieves rotation invariance efficiently by simple shifting operations. The proposed filter is versatile as its selectivity is determined from any given vessel-like prototype pattern in an automatic configuration process. We configure two B-COSFIRE filters, namely symmetric and asymmetric, that are selective for bars and bar-endings, respectively. We achieve vessel segmentation by summing up the responses of the two rotation-invariant B-COSFIRE filters followed by thresholding.
The results that we achieve on three publicly available data sets (DRIVE: Se = 0.7655, Sp = 0.9704; STARE: Se = 0.7716, Sp = 0.9701; CHASE_DB1: Se = 0.7585, Sp = 0.9587) are higher than many of the state-of-the-art methods. The proposed segmentation approach is also very efficient with a time complexity that is significantly lower than existing methods.
2013
Azzopardi, George; Petkov, Nicolai
Automatic detection of vascular bifurcations in segmented retinal images using trainable COSFIRE filters Journal Article
Abstract | Links | BibTeX | Altmetric | Tags: brain-inspired, keypoint detection, medical image analysis, trainable filters
@article{azzopardi2013automatic,
  title     = {Automatic detection of vascular bifurcations in segmented retinal images using trainable {COSFIRE} filters},
  author    = {Azzopardi, George and Petkov, Nicolai},
  doi       = {10.1016/j.patrec.2012.11.002},
  year      = {2013},
  date      = {2013-01-01},
  urldate   = {2013-01-01},
  journal   = {Pattern Recognition Letters},
  volume    = {34},
  number    = {8},
  pages     = {922--933},
  publisher = {North-Holland},
  abstract  = {Background: The vascular tree observed in a retinal fundus image can provide clues for cardiovascular diseases. Its analysis requires the identification of vessel bifurcations and crossovers.
Methods: We use a set of trainable keypoint detectors that we call Combination Of Shifted FIlter REsponses or COSFIRE filters to automatically detect vascular bifurcations in segmented retinal images. We configure a set of COSFIRE filters that are selective for a number of prototype bifurcations and demonstrate that such filters can be effectively used to detect bifurcations that are similar to the prototypical ones. The automatic configuration of such a filter selects given channels of a bank of Gabor filters and determines certain blur and shift parameters. The response of a COSFIRE filter is computed as the weighted geometric mean of the blurred and shifted responses of the selected Gabor filters. The COSFIRE approach is inspired by the function of a specific type of shape-selective neuron in area V4 of visual cortex.
Results: We ran experiments on three data sets and achieved the following results: (a) a recall of 97.88% at precision of 96.94% on 40 manually segmented images provided in the DRIVE data set, (b) a recall of 97.32% at precision of 96.04% on 20 manually segmented images provided in the STARE data set, and (c) a recall of 97.02% at precision of 96.53% on a set of 10 automatically segmented images obtained from images in the DRIVE data set.
Conclusions: The COSFIRE filters that we use are conceptually simple and easy to implement: the filter output is computed as the weighted geometric mean of blurred and shifted Gabor filter responses. They are versatile keypoint detectors as they can be configured with any given local contour pattern and are subsequently able to detect the same and similar patterns.},
  keywords  = {brain-inspired, keypoint detection, medical image analysis, trainable filters},
  pubstate  = {published},
  tppubtype = {article}
}
Methods: We use a set of trainable keypoint detectors that we call Combination Of Shifted FIlter REsponses or COSFIRE filters to automatically detect vascular bifurcations in segmented retinal images. We configure a set of COSFIRE filters that are selective for a number of prototype bifurcations and demonstrate that such filters can be effectively used to detect bifurcations that are similar to the prototypical ones. The automatic configuration of such a filter selects given channels of a bank of Gabor filters and determines certain blur and shift parameters. The response of a COSFIRE filter is computed as the weighted geometric mean of the blurred and shifted responses of the selected Gabor filters. The COSFIRE approach is inspired by the function of a specific type of shape-selective neuron in area V4 of visual cortex.
Results: We ran experiments on three data sets and achieved the following results: (a) a recall of 97.88% at precision of 96.94% on 40 manually segmented images provided in the DRIVE data set, (b) a recall of 97.32% at precision of 96.04% on 20 manually segmented images provided in the STARE data set, and (c) a recall of 97.02% at precision of 96.53% on a set of 10 automatically segmented images obtained from images in the DRIVE data set.
Conclusions: The COSFIRE filters that we use are conceptually simple and easy to implement: the filter output is computed as the weighted geometric mean of blurred and shifted Gabor filter responses. They are versatile keypoint detectors as they can be configured with any given local contour pattern and are subsequently able to detect the same and similar patterns.