2016
Guo, Jiapan; Shi, Chenyu; Azzopardi, George; Petkov, Nicolai
Inhibition-augmented trainable COSFIRE filters for keypoint detection and object recognition Journal Article
Abstract | Links | BibTeX | Altmetric | Tags: brain-inspired, keypoint detection, noise suppression, object detection, trainable filters
@article{guo2016inhibition,
title = {Inhibition-augmented trainable {COSFIRE} filters for keypoint detection and object recognition},
author = {Jiapan Guo and Chenyu Shi and George Azzopardi and Nicolai Petkov},
doi = {10.1007/s00138-016-0777-3},
year = {2016},
date = {2016-01-01},
urldate = {2016-01-01},
journal = {Machine Vision and Applications},
volume = {27},
pages = {1197--1211},
publisher = {Springer Berlin Heidelberg},
abstract = {The shape and meaning of an object can radically change with the addition of one or more contour parts. For instance, a T-junction can become a crossover. We extend the COSFIRE trainable filter approach which uses a positive prototype pattern for configuration by adding a set of negative prototype patterns. The configured filter responds to patterns that are similar to the positive prototype but not to any of the negative prototypes. The configuration of such a filter comprises selecting given channels of a bank of Gabor filters that provide excitatory or inhibitory input and determining certain blur and shift parameters. We compute the response of such a filter as the excitatory input minus a fraction of the maximum of inhibitory inputs. We use three applications to demonstrate the effectiveness of inhibition: the exclusive detection of vascular bifurcations (i.e., without crossovers) in retinal fundus images (DRIVE data set), the recognition of architectural and electrical symbols (GREC’11 data set) and the recognition of handwritten digits (MNIST data set).},
keywords = {brain-inspired, keypoint detection, noise suppression, object detection, trainable filters},
pubstate = {published},
tppubtype = {article}
}
2013
Azzopardi, George; Petkov, Nicolai
Automatic detection of vascular bifurcations in segmented retinal images using trainable COSFIRE filters Journal Article
Abstract | Links | BibTeX | Altmetric | Tags: brain-inspired, keypoint detection, medical image analysis, trainable filters
@article{azzopardi2013automatic,
title = {Automatic detection of vascular bifurcations in segmented retinal images using trainable {COSFIRE} filters},
author = {George Azzopardi and Nicolai Petkov},
doi = {10.1016/j.patrec.2012.11.002},
year = {2013},
date = {2013-01-01},
urldate = {2013-01-01},
journal = {Pattern Recognition Letters},
volume = {34},
number = {8},
pages = {922--933},
publisher = {North-Holland},
abstract = {Background: The vascular tree observed in a retinal fundus image can provide clues for cardiovascular diseases. Its analysis requires the identification of vessel bifurcations and crossovers.
Methods: We use a set of trainable keypoint detectors that we call Combination Of Shifted FIlter REsponses or COSFIRE filters to automatically detect vascular bifurcations in segmented retinal images. We configure a set of COSFIRE filters that are selective for a number of prototype bifurcations and demonstrate that such filters can be effectively used to detect bifurcations that are similar to the prototypical ones. The automatic configuration of such a filter selects given channels of a bank of Gabor filters and determines certain blur and shift parameters. The response of a COSFIRE filter is computed as the weighted geometric mean of the blurred and shifted responses of the selected Gabor filters. The COSFIRE approach is inspired by the function of a specific type of shape-selective neuron in area V4 of visual cortex.
Results: We ran experiments on three data sets and achieved the following results: (a) a recall of 97.88% at precision of 96.94% on 40 manually segmented images provided in the DRIVE data set, (b) a recall of 97.32% at precision of 96.04% on 20 manually segmented images provided in the STARE data set, and (c) a recall of 97.02% at precision of 96.53% on a set of 10 automatically segmented images obtained from images in the DRIVE data set.
Conclusions: The COSFIRE filters that we use are conceptually simple and easy to implement: the filter output is computed as the weighted geometric mean of blurred and shifted Gabor filter responses. They are versatile keypoint detectors as they can be configured with any given local contour pattern and are subsequently able to detect the same and similar patterns.},
keywords = {brain-inspired, keypoint detection, medical image analysis, trainable filters},
pubstate = {published},
tppubtype = {article}
}
Methods: We use a set of trainable keypoint detectors that we call Combination Of Shifted FIlter REsponses or COSFIRE filters to automatically detect vascular bifurcations in segmented retinal images. We configure a set of COSFIRE filters that are selective for a number of prototype bifurcations and demonstrate that such filters can be effectively used to detect bifurcations that are similar to the prototypical ones. The automatic configuration of such a filter selects given channels of a bank of Gabor filters and determines certain blur and shift parameters. The response of a COSFIRE filter is computed as the weighted geometric mean of the blurred and shifted responses of the selected Gabor filters. The COSFIRE approach is inspired by the function of a specific type of shape-selective neuron in area V4 of visual cortex.
Results: We ran experiments on three data sets and achieved the following results: (a) a recall of 97.88% at precision of 96.94% on 40 manually segmented images provided in the DRIVE data set, (b) a recall of 97.32% at precision of 96.04% on 20 manually segmented images provided in the STARE data set, and (c) a recall of 97.02% at precision of 96.53% on a set of 10 automatically segmented images obtained from images in the DRIVE data set.
Conclusions: The COSFIRE filters that we use are conceptually simple and easy to implement: the filter output is computed as the weighted geometric mean of blurred and shifted Gabor filter responses. They are versatile keypoint detectors as they can be configured with any given local contour pattern and are subsequently able to detect the same and similar patterns.
2012
Azzopardi, George; Petkov, Nicolai
Trainable COSFIRE filters for keypoint detection and pattern recognition Journal Article
Abstract | Links | BibTeX | Altmetric | Tags: brain-inspired, image classification, keypoint detection, object detection, segmentation, trainable filters
@article{azzopardi2013trainable,
title = {Trainable {COSFIRE} filters for keypoint detection and pattern recognition},
author = {George Azzopardi and Nicolai Petkov},
doi = {10.1109/TPAMI.2012.106},
year = {2012},
date = {2012-01-01},
urldate = {2012-01-01},
journal = {{IEEE} Transactions on Pattern Analysis and Machine Intelligence},
volume = {35},
number = {2},
pages = {490--503},
publisher = {IEEE},
abstract = {Background: Keypoint detection is important for many computer vision applications. Existing methods suffer from insufficient selectivity regarding the shape properties of features and are vulnerable to contrast variations and to the presence of noise or texture. Methods: We propose a trainable filter which we call Combination Of Shifted FIlter REsponses (COSFIRE) and use for keypoint detection and pattern recognition. It is automatically configured to be selective for a local contour pattern specified by an example. The configuration comprises selecting given channels of a bank of Gabor filters and determining certain blur and shift parameters. A COSFIRE filter response is computed as the weighted geometric mean of the blurred and shifted responses of the selected Gabor filters. It shares similar properties with some shape-selective neurons in visual cortex, which provided inspiration for this work. Results: We demonstrate the effectiveness of the proposed filters in three applications: the detection of retinal vascular bifurcations (DRIVE dataset: 98.50 percent recall, 96.09 percent precision), the recognition of handwritten digits (MNIST dataset: 99.48 percent correct classification), and the detection and recognition of traffic signs in complex scenes (100 percent recall and precision). Conclusions: The proposed COSFIRE filters are conceptually simple and easy to implement. They are versatile keypoint detectors and are highly effective in practical computer vision applications.},
keywords = {brain-inspired, image classification, keypoint detection, object detection, segmentation, trainable filters},
pubstate = {published},
tppubtype = {article}
}