2023
Aswath, Anusha; Alsahaf, Ahmad; Giepmans, Ben N. G.; Azzopardi, George
Segmentation in large-scale cellular electron microscopy with deep learning: A literature survey Journal Article
Links | BibTeX | Altmetric | Tags: deep learning, electron microscopy, medical image analysis, segmentation
@article{Aswath2023,
  title     = {Segmentation in large-scale cellular electron microscopy with deep learning: A literature survey},
  author    = {Aswath, Anusha and Alsahaf, Ahmad and Giepmans, Ben N. G. and Azzopardi, George},
  doi       = {10.1016/j.media.2023.102920},
  year      = {2023},
  date      = {2023-08-06},
  urldate   = {2023-08-06},
  journal   = {Medical Image Analysis},
  pages     = {102920},
  keywords  = {deep learning, electron microscopy, medical image analysis, segmentation},
  pubstate  = {published},
  tppubtype = {article}
}
2016
Strisciuglio, Nicola; Azzopardi, George; Vento, Mario; Petkov, Nicolai
Supervised vessel delineation in retinal fundus images with the automatic selection of B-COSFIRE filters Journal Article
Abstract | Links | BibTeX | Altmetric | Tags: brain-inspired, medical image analysis, segmentation, trainable filters
@article{strisciuglio2016supervised,
  title     = {Supervised vessel delineation in retinal fundus images with the automatic selection of {B-COSFIRE} filters},
  author    = {Strisciuglio, Nicola and Azzopardi, George and Vento, Mario and Petkov, Nicolai},
  doi       = {10.1007/s00138-016-0781-7},
  year      = {2016},
  date      = {2016-01-01},
  urldate   = {2016-01-01},
  journal   = {Machine Vision and Applications},
  publisher = {Springer Berlin Heidelberg},
  abstract  = {The inspection of retinal fundus images allows medical doctors to diagnose various pathologies. Computer-aided diagnosis systems can be used to assist in this process. As a first step, such systems delineate the vessel tree from the background. We propose a method for the delineation of blood vessels in retinal images that is effective for vessels of different thickness. In the proposed method, we employ a set of B-COSFIRE filters selective for vessels and vessel-endings. Such a set is determined in an automatic selection process and can adapt to different applications. We compare the performance of different selection methods based upon machine learning and information theory. The results that we achieve by performing experiments on two public benchmark data sets, namely DRIVE and STARE, demonstrate the effectiveness of the proposed approach.},
  keywords  = {brain-inspired, medical image analysis, segmentation, trainable filters},
  pubstate  = {published},
  tppubtype = {article}
}
2015
Azzopardi, George; Strisciuglio, Nicola; Vento, Mario; Petkov, Nicolai
Trainable COSFIRE filters for vessel delineation with application to retinal images Journal Article
Abstract | Links | BibTeX | Altmetric | Tags: brain-inspired, medical image analysis, segmentation, trainable filters
@article{azzopardi2015trainable,
  title     = {Trainable {COSFIRE} filters for vessel delineation with application to retinal images},
  author    = {Azzopardi, George and Strisciuglio, Nicola and Vento, Mario and Petkov, Nicolai},
  doi       = {10.1016/j.media.2014.08.002},
  year      = {2015},
  date      = {2015-01-01},
  urldate   = {2015-01-01},
  journal   = {Medical Image Analysis},
  volume    = {19},
  number    = {1},
  pages     = {46--57},
  publisher = {Elsevier},
  abstract  = {Retinal imaging provides a non-invasive opportunity for the diagnosis of several medical pathologies. The automatic segmentation of the vessel tree is an important pre-processing step which facilitates subsequent automatic processes that contribute to such diagnosis.
We introduce a novel method for the automatic segmentation of vessel trees in retinal fundus images. We propose a filter that selectively responds to vessels and that we call B-COSFIRE with B standing for bar which is an abstraction for a vessel. It is based on the existing COSFIRE (Combination Of Shifted Filter Responses) approach. A B-COSFIRE filter achieves orientation selectivity by computing the weighted geometric mean of the output of a pool of Difference-of-Gaussians filters, whose supports are aligned in a collinear manner. It achieves rotation invariance efficiently by simple shifting operations. The proposed filter is versatile as its selectivity is determined from any given vessel-like prototype pattern in an automatic configuration process. We configure two B-COSFIRE filters, namely symmetric and asymmetric, that are selective for bars and bar-endings, respectively. We achieve vessel segmentation by summing up the responses of the two rotation-invariant B-COSFIRE filters followed by thresholding.
The results that we achieve on three publicly available data sets (DRIVE: Se = 0.7655, Sp = 0.9704; STARE: Se = 0.7716, Sp = 0.9701; CHASE\_DB1: Se = 0.7585, Sp = 0.9587) are higher than many of the state-of-the-art methods. The proposed segmentation approach is also very efficient with a time complexity that is significantly lower than existing methods.},
  keywords  = {brain-inspired, medical image analysis, segmentation, trainable filters},
  pubstate  = {published},
  tppubtype = {article}
}
@comment{Duplicated export artifact: the two paragraphs below repeat the abstract of azzopardi2015trainable and belong to no entry.
We introduce a novel method for the automatic segmentation of vessel trees in retinal fundus images. We propose a filter that selectively responds to vessels and that we call B-COSFIRE with B standing for bar which is an abstraction for a vessel. It is based on the existing COSFIRE (Combination Of Shifted Filter Responses) approach. A B-COSFIRE filter achieves orientation selectivity by computing the weighted geometric mean of the output of a pool of Difference-of-Gaussians filters, whose supports are aligned in a collinear manner. It achieves rotation invariance efficiently by simple shifting operations. The proposed filter is versatile as its selectivity is determined from any given vessel-like prototype pattern in an automatic configuration process. We configure two B-COSFIRE filters, namely symmetric and asymmetric, that are selective for bars and bar-endings, respectively. We achieve vessel segmentation by summing up the responses of the two rotation-invariant B-COSFIRE filters followed by thresholding.
The results that we achieve on three publicly available data sets (DRIVE: Se = 0.7655, Sp = 0.9704; STARE: Se = 0.7716, Sp = 0.9701; CHASE_DB1: Se = 0.7585, Sp = 0.9587) are higher than many of the state-of-the-art methods. The proposed segmentation approach is also very efficient with a time complexity that is significantly lower than existing methods.}
2012
Azzopardi, George; Petkov, Nicolai
Trainable COSFIRE filters for keypoint detection and pattern recognition Journal Article
Abstract | Links | BibTeX | Altmetric | Tags: brain-inspired, image classification, keypoint detection, object detection, segmentation, trainable filters
@article{azzopardi2013trainable,
  title     = {Trainable {COSFIRE} filters for keypoint detection and pattern recognition},
  author    = {Azzopardi, George and Petkov, Nicolai},
  doi       = {10.1109/TPAMI.2012.106},
  year      = {2012},
  date      = {2012-01-01},
  urldate   = {2012-01-01},
  journal   = {IEEE Transactions on Pattern Analysis and Machine Intelligence},
  volume    = {35},
  number    = {2},
  pages     = {490--503},
  publisher = {IEEE},
  abstract  = {Background: Keypoint detection is important for many computer vision applications. Existing methods suffer from insufficient selectivity regarding the shape properties of features and are vulnerable to contrast variations and to the presence of noise or texture. Methods: We propose a trainable filter which we call Combination Of Shifted FIlter REsponses (COSFIRE) and use for keypoint detection and pattern recognition. It is automatically configured to be selective for a local contour pattern specified by an example. The configuration comprises selecting given channels of a bank of Gabor filters and determining certain blur and shift parameters. A COSFIRE filter response is computed as the weighted geometric mean of the blurred and shifted responses of the selected Gabor filters. It shares similar properties with some shape-selective neurons in visual cortex, which provided inspiration for this work. Results: We demonstrate the effectiveness of the proposed filters in three applications: the detection of retinal vascular bifurcations (DRIVE dataset: 98.50 percent recall, 96.09 percent precision), the recognition of handwritten digits (MNIST dataset: 99.48 percent correct classification), and the detection and recognition of traffic signs in complex scenes (100 percent recall and precision). Conclusions: The proposed COSFIRE filters are conceptually simple and easy to implement. They are versatile keypoint detectors and are highly effective in practical computer vision applications.},
  keywords  = {brain-inspired, image classification, keypoint detection, object detection, segmentation, trainable filters},
  pubstate  = {published},
  tppubtype = {article}
}