BibTeX entry for the publication

@InProceedings{ Mi2014.1,
author = {Mitran, Madalina and Cabanac, Guillaume and Boughanem, Mohand},
title = {{GeoTime}-Based Tag Ranking Model for Automatic Image Annotation},
booktitle = {{ACM} Symposium on Applied Computing ({SAC}), Gyeongju, Korea, 24--28 March 2014},
year = {2014},
month = mar,
publisher = {ACM},
pages = {896--901},
language = {english},
keywords = {image annotations, social multimedia, tag ranking model, spatial and temporal proximities, Flickr},
abstract = {In this paper, we propose a novel tag ranking model to address the automatic image annotation research problem. Our aim is to assign relevant descriptors to a query image, using textual, spatial, and temporal cues from nearby images. The assumption behind our model is that tags associated with images that are closer in time and space to a query image have a higher probability of being relevant. Given a query image, we retrieve the images (available in community image databases, such as Flickr) located in its close geographical area using its GPS coordinates (i.e., latitude and longitude). Once these images are retrieved, we take advantage of their metadata (e.g., users' social contributions and information stored in the EXIF descriptor) in order to suggest and rank tags. While most state-of-the-art approaches use visual and textual factors to suggest and rank tags, our model also uses temporal and spatial proximity factors. To capture these proximity factors, we used similarity methods and kernel functions. Finally, the top-ranked tags are used to annotate a query image. We conducted a series of experiments on a dataset consisting of over 201,000 Flickr images from the Paris geographic area. The experimental results showed that our tag ranking model provides significant improvement over two state-of-the-art baselines.}