---
AUTHORS | 201 ++-
doc/Doxyfile.in | 10 +-
doc/DoxygenLayout.xml | 2 +-
doc/Makefile.am | 46 +-
doc/bibliography.dox | 37 +
doc/bibliography.hh | 19 -
doc/contributors.dox | 5 +
doc/lrde.bib | 3856 +++++++++++++++++++++++++++++
doc/{mainpage.hh => mainpage.dox} | 6 +-
doc/{modules_list.hh => modules_list.dox} | 0
doc/olena.qhcp | 12 -
scribo/doc/Doxyfile.in | 13 +-
scribo/doc/groups.dox | 19 +
scribo/scribo/core/component_set.hh | 4 +
scribo/scribo/core/document.hh | 7 +-
scribo/scribo/core/line_links.hh | 4 +-
scribo/scribo/core/line_set.hh | 7 +-
scribo/scribo/core/object_groups.hh | 2 +
scribo/scribo/core/object_links.hh | 4 +-
scribo/scribo/core/paragraph_set.hh | 1 +
20 files changed, 4141 insertions(+), 114 deletions(-)
create mode 100644 doc/bibliography.dox
delete mode 100644 doc/bibliography.hh
create mode 100644 doc/contributors.dox
create mode 100644 doc/lrde.bib
rename doc/{mainpage.hh => mainpage.dox} (80%)
rename doc/{modules_list.hh => modules_list.dox} (100%)
delete mode 100644 doc/olena.qhcp
create mode 100644 scribo/doc/groups.dox
diff --git a/AUTHORS b/AUTHORS
index f12b60e..9bea227 100644
--- a/AUTHORS
+++ b/AUTHORS
@@ -5,7 +5,8 @@ This package was written by and with the assistance of
Active LRDE Staff.
==================
-* Thierry Géraud theo(a)lrde.epita.fr
+* **Thierry Géraud** <theo(a)lrde.epita.fr>
+
- Management of the project in the first place.
- Type system.
- Revamp of Metalic (Olena pre-1.0).
@@ -14,11 +15,15 @@ Active LRDE Staff.
- Olena 1.0 core.
- Numerous hours spent thinking about Olena.
-* Guillaume Lazzara z(a)lrde.epita.fr
+
+* **Guillaume Lazzara** <z(a)lrde.epita.fr>
+
- SCRIBO module main author and maintainer.
- Library improvements.
-* Roland Levillain roland(a)lrde.epita.fr
+
+* **Roland Levillain** <roland(a)lrde.epita.fr>
+
- Project manager.
- Theoretical mentor.
- Renovation of Swilena (Olena 0.10a).
@@ -32,30 +37,41 @@ Active LRDE Staff.
Past contributors of LRDE Staff.
================================
-* Jérôme Darbon jerome(a)lrde.epita.fr
+* **Jérôme Darbon** <jerome(a)lrde.epita.fr>
+
- Image morphology and Olena morpho.
-* Réda Dehak reda(a)lrde.epita.fr
+
+* **Réda Dehak** <reda(a)lrde.epita.fr>
+
- Management of the project.
- Color conversions.
- Cleanup the sources.
-* Akim Demaille akim(a)lrde.epita.fr
+
+* **Akim Demaille** <akim(a)lrde.epita.fr>
+
- Configuration system.
- Maintenance of code's clearness.
-* Alexandre Duret-Lutz adl(a)lrde.epita.fr
+
+* **Alexandre Duret-Lutz** <adl(a)lrde.epita.fr>
+
- Maintenance of the source tree for several years.
- Type system.
- Test system.
- First documentation system.
- Numerous hours spent on Olena to make it better.
-* Fabien Freling fabien.freling(a)lrde.epita.fr
+
+* **Fabien Freling** <fabien.freling(a)lrde.epita.fr>
+
- Work on Melimage with IGR.
- Library improvements.
-* Yann Jacquelet jacquelet(a)lrde.epita.fr
+
+* **Yann Jacquelet** <jacquelet(a)lrde.epita.fr>
+
- SCRIBO-related work.
@@ -63,20 +79,25 @@ Past contributors of LRDE Staff.
Interns.
========
-* Arthur Crépin-Leblond
+* **Arthur Crépin-Leblond**
+
- SCRIBO structured outputs (XML, HTML, PDF).
-* Raphaël Boissel
+
+* **Raphaël Boissel**
- Graph implementation of a DIA toolchain in SCRIBO.
=========================
Students from Class 2012.
=========================
-* Frédéric Bour frederic.bour(a)lrde.epita.fr
+* **Frédéric Bour** <frederic.bour(a)lrde.epita.fr>
+
- Prototype of the upcoming functions system.
-* Julien Marquegnies julien.marquegnies(a)lrde.epita.fr
+
+* **Julien Marquegnies** <julien.marquegnies(a)lrde.epita.fr>
+
- Fast implementation of the Hough transform.
- Many improvements in SCRIBO layout analysis.
@@ -84,7 +105,8 @@ Students from Class 2012.
Students from Class 2011.
=========================
-* Edwin Carlinet edwin.carlinet(a)lrde.epita.fr
+* **Edwin Carlinet** <edwin.carlinet(a)lrde.epita.fr>
+
- Accumulators.
- Morphological connected filters.
@@ -92,32 +114,44 @@ Students from Class 2011.
Students from Class 2010.
=========================
-* Florent D'Halluin
+* **Florent D'Halluin**
+
- Document layout viewer.
-* Etienne Folio
+
+* **Etienne Folio**
+
- Color type traits.
=========================
Students from Class 2009.
=========================
-* Alexandre Abraham
+* **Alexandre Abraham**
+
- R/W functions.
- Topological watershed transform.
-* Nicolas Ballas
+
+* **Nicolas Ballas**
+
- Image properties.
- Data routines.
-* Vivien Delmon
+
+* **Vivien Delmon**
+
- Misc. corrections.
-* Matthieu Garrigues
+
+* **Matthieu Garrigues**
+
- Value types and I/Os.
- Draft of the FLLT.
-* Ugo Jardonnet
+
+* **Ugo Jardonnet**
+
- Geometrical transforms.
- ICP registration algorithm.
@@ -125,24 +159,32 @@ Students from Class 2009.
Students from Class 2008.
=========================
-* Tristan Croiset
+* **Tristan Croiset**
+
- Dynamic (Dyn) (Olena proto-1.0)
-* Quentin Hocquet
+
+* **Quentin Hocquet**
+
- SCOOL
-* Thomas Moulard
+
+* **Thomas Moulard**
+
- SCOOL
=========================
Students from Class 2007.
=========================
-* Christophe Berger
+* **Christophe Berger**
+
- Connected filters (Olena proto-1.0, Olena pre-1.0).
- SCOOP 2 (Olena proto-1.0).
-* Nicolas Widynski
+
+* **Nicolas Widynski**
+
- Connected filters (Olena proto-1.0).
- Inpainting (Olena proto-1.0).
- Segmentation (watershed transform, watersnake, snakes) (Olena proto-1.0)
@@ -151,16 +193,21 @@ Students from Class 2007.
Students from Class 2006.
=========================
-* Simon Odou
+* **Simon Odou**
+
- Contributions to document the code.
- Deconvolution.
- Generic morphers.
- SCOOP 2 (Olena proto-1.0).
-* Nicolas Pouillard
+
+* **Nicolas Pouillard**
+
- Dynamic-static bridge prototype (Dyn) (Olena proto-1.0).
-* Damien Thivolle
+
+* **Damien Thivolle**
+
- Contributions to document the code.
- Generic morphers.
- Configuration system.
@@ -174,14 +221,17 @@ Students from Class 2006.
Students from Class 2005.
=========================
-* Giovanni Palma
+* **Giovanni Palma**
+
- Color conversion.
- Attribute opening and closing algorithms.
- New documentation system.
- Document the code.
- Cleanup the sources.
-* Niels Van Vliet
+
+* **Niels Van Vliet**
+
- Color conversion.
- Attribute opening and closing algorithms.
- Contributions to document the code.
@@ -191,21 +241,28 @@ Students from Class 2005.
Students from Class 2004.
=========================
-* Sylvain Berlemont
+* **Sylvain Berlemont**
+
- Combinatorial maps.
- Cleanup the sources.
-* Nicolas Burrus
+
+* **Nicolas Burrus**
+
- Integre.
- Olena I/O.
- Source tree.
-* David Lesage
+
+* **David Lesage**
+
- Type system.
- New paradigm.
- Cleanup the sources.
-* Astrid Wang
+
+* **Astrid Wang**
+
- New documentation system.
- Static arrays implementation.
@@ -213,25 +270,36 @@ Students from Class 2004.
Students from Class 2003.
=========================
-* Ignacy Gawedzki
+* **Ignacy Gawedzki**
+
- Color system.
-* Jean-Sébastien Mouret
+
+* **Jean-Sébastien Mouret**
+
- Image I/O.
- Source tree and configuration system.
- Fast morphological operators.
-* Quôc Peyrot
+
+* **Quôc Peyrot**
+
- Watershed algorithm.
-* Raphaël Poss
+
+* **Raphaël Poss**
+
- Source tree and configuration system.
- Documentation.
-* Yann Régis-Gianas
+
+* **Yann Régis-Gianas**
+
- Type system.
-* Emmanuel Turquin
+
+* **Emmanuel Turquin**
+
- Implementation of transforms.
- Integre.
@@ -239,22 +307,29 @@ Students from Class 2003.
Students from Class 2002.
=========================
-* Jean Chalard
+* **Jean Chalard**
+
- Colors.
- Vectors and matrices implementation.
- Olena iterators implementation.
- Wavelets.
-* Rémi Coupet
+
+* **Rémi Coupet**
+
- Olena morpho.
- Data types (pre-0.6).
- Olena core.
- Bibliographical research.
-* Ludovic Perrine
+
+* **Ludovic Perrine**
+
- Fuzzy types.
-* Pierre-Yves Strub
+
+* **Pierre-Yves Strub**
+
- Olena morpho.
- Source tree and configuration system.
- Type system.
@@ -263,20 +338,28 @@ Students from Class 2002.
Students from Class 2001.
=========================
-* Alexis Angelidis
+* **Alexis Angelidis**
+
- Olena logo.
-* Vincent Berruchon
-* Renaud François
+* **Vincent Berruchon**
+
+
+* **Renaud François**
+
- Bibliographical research.
-* Anthony Pinagot
+
+* **Anthony Pinagot**
+
- Olena I/O.
- Statistical operators.
- FFT algorithm.
-* Michaël Strauss
+
+* **Michaël Strauss**
+
- Image morphology.
- Watershed algorithms.
- Olena I/O.
@@ -285,7 +368,8 @@ Students from Class 2001.
Students from Class 2000.
=========================
-* Yoann Fabre
+* **Yoann Fabre**
+
- Type system.
@@ -293,17 +377,24 @@ Students from Class 2000.
Other Contributors.
===================
-* Guillaume Duhamel
+* **Guillaume Duhamel**
+
- Low-level routines.
-* Simon Nivault
+
+* **Simon Nivault**
+
- Lemming object.
- Many cleanups.
-* Dimitri Papadopoulos-Orfanos
+
+* **Dimitri Papadopoulos-Orfanos**
+
- Type system.
-* Heru Xue
+
+* **Heru Xue**
+
- Color system.
diff --git a/doc/Doxyfile.in b/doc/Doxyfile.in
index a0fcf00..86377ef 100644
--- a/doc/Doxyfile.in
+++ b/doc/Doxyfile.in
@@ -40,7 +40,7 @@ MULTILINE_CPP_IS_BRIEF = NO
INHERIT_DOCS = YES
SEPARATE_MEMBER_PAGES = NO
TAB_SIZE = 8
-ALIASES =
+ALIASES = " "
OPTIMIZE_OUTPUT_FOR_C = NO
OPTIMIZE_OUTPUT_JAVA = NO
BUILTIN_STL_SUPPORT = YES
@@ -92,17 +92,15 @@ WARN_LOGFILE =
# configuration options related to the input files
#---------------------------------------------------------------------------
INPUT = @top_srcdir@/doc
-FILE_PATTERNS = *.cc \
- *.hh \
- *.hxx \
- *.hcc
+FILE_PATTERNS = *.dox
RECURSIVE = YES
EXCLUDE = @top_srcdir@/milena/ \
@top_srcdir@/scribo/
EXCLUDE_SYMLINKS = YES
EXCLUDE_PATTERNS = *spe.hh
EXCLUDE_SYMBOLS = *::internal* *_ mln::trait::*
-EXAMPLE_PATH =
+EXAMPLE_PATH = @top_srcdir@/doc/ \
+ @builddir@
EXAMPLE_PATTERNS = *.cc \
*.cc.raw \
*.txt
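A note on the two changes above: restricting FILE_PATTERNS to *.dox stops
Doxygen from re-parsing C++ headers for the top-level manual, and adding
@builddir@ to EXAMPLE_PATH lets \htmlinclude find fragments generated at
build time (see the doc/Makefile.am rules below). An illustrative way to
list what the new input patterns match, run from the source root:

    # Pages Doxygen will now parse: *.dox files under doc/
    # (INPUT = doc, RECURSIVE = YES, FILE_PATTERNS = *.dox).
    find doc -name '*.dox'
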
diff --git a/doc/DoxygenLayout.xml b/doc/DoxygenLayout.xml
index a279910..0dbf0d7 100644
--- a/doc/DoxygenLayout.xml
+++ b/doc/DoxygenLayout.xml
@@ -2,7 +2,7 @@
<!-- Navigation index tabs for HTML output -->
<navindex>
<tab type="mainpage" visible="yes"
title="Home"/>
- <tab type="pages" visible="no" title=""/>
+ <tab type="pages" visible="yes" title=""/>
<tab type="modules" visible="no" title=""/>
<tab type="namespaces" visible="no" title="">
<tab type="namespaces" visible="no" title=""/>
diff --git a/doc/Makefile.am b/doc/Makefile.am
index e733ab9..1bf95d5 100644
--- a/doc/Makefile.am
+++ b/doc/Makefile.am
@@ -53,13 +53,17 @@ regen-doc:
DOXYFILE_USER = Doxyfile_user
-REFMAN_deps = $(srcdir)/footer.html \
- $(srcdir)/header.html \
- $(srcdir)/doxygen.css \
- $(srcdir)/mainpage.hh \
- $(srcdir)/DoxygenLayout.xml \
- $(srcdir)/modules_list.hh \
- $(srcdir)/olena.bib
+REFMAN_deps = $(srcdir)/footer.html \
+ $(srcdir)/header.html \
+ $(srcdir)/doxygen.css \
+ $(srcdir)/mainpage.dox \
+ $(srcdir)/DoxygenLayout.xml \
+ $(srcdir)/modules_list.dox \
+ $(srcdir)/olena.bib \
+ $(srcdir)/contributors.dox \
+ $(srcdir)/bibliography.dox \
+ contributors.html \
+ lrde_olena.html
# Sed is used to generate Doxyfile from Doxyfile.in instead of
# configure, because the former is way faster than the latter.
@@ -113,8 +117,36 @@ $(srcdir)/$(USER_REFMAN): $(srcdir)/user-refman.stamp
EXTRA_DIST += $(USER_REFMAN)
+## -------------- ##
+## Contributors. ##
+## -------------- ##
+
+contributors.html: $(top_srcdir)/AUTHORS
+ rst2html --stylesheet-path=$(srcdir)/doxygen.css --link-stylesheet \
+ --input-encoding=latin1 --output-encoding=latin1 \
+ $(top_srcdir)/AUTHORS > contributors.html
+
+
+## -------------- ##
+## Bibliography. ##
+## -------------- ##
+
+# Warning: also generates lrde_olena.cite
+lrde_olena.bib:
+ bib2bib -s plain -oc lrde_olena.cite -ob lrde_olena.bib -c \
+ 'project = "Image" or project = "Olena"' \
+ -s year -r $(srcdir)/lrde.bib
+
+# Warning: also generates lrde_olena_bib.html
+lrde_olena.html: lrde_olena.bib
+ bibtex2html -s plain -nokeywords -citefile lrde_olena.cite \
+ -noabstract -nofooter -labelname -nodoc -linebreak -nf urllrde PDF lrde_olena.bib
+MAINTAINERCLEANFILES += lrde_olena.cite \
+ lrde_olena.bib \
+ lrde_olena_bib.html \
+ lrde_olena.html
## ---------- ##
## Cleaning. ##
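For reviewers who want to reproduce the generated fragments by hand, the
new rules boil down to the pipeline below, a sketch using only the
commands and flags appearing in this diff (docutils' rst2html and the
bib2bib/bibtex2html tools must be installed; paths are illustrative):

    # Turn the reStructuredText AUTHORS file into an HTML fragment.
    rst2html --stylesheet-path=doc/doxygen.css --link-stylesheet \
      --input-encoding=latin1 --output-encoding=latin1 AUTHORS > contributors.html

    # Select the Olena-related entries from the full database; -ob names
    # the filtered .bib output, -oc the citation list used below.
    bib2bib -s plain -oc lrde_olena.cite -ob lrde_olena.bib \
      -c 'project = "Image" or project = "Olena"' -s year -r doc/lrde.bib

    # Render the filtered bibliography to HTML (this also emits
    # lrde_olena_bib.html, hence the extra entry in MAINTAINERCLEANFILES).
    bibtex2html -s plain -nokeywords -citefile lrde_olena.cite \
      -noabstract -nofooter -labelname -nodoc -linebreak \
      -nf urllrde PDF lrde_olena.bib
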
diff --git a/doc/bibliography.dox b/doc/bibliography.dox
new file mode 100644
index 0000000..59518cb
--- /dev/null
+++ b/doc/bibliography.dox
@@ -0,0 +1,37 @@
+/** \page bibliography Bibliography
+
+ This page references all the publications used to implement Olena, as well as those about Olena itself.
+
+ Summary:
+ \li \ref olnpublis
+ \li \ref olnpublisjournals
+ \li \ref olnpublisconfs
+ \li \ref olnpublisposters
+ \li \ref olnpublistechreps
+
+ \li \cite burrus.03.mpool
+ \li \cite darbon.02.ismm
+ \li \cite darbon.04.ecoopphd
+ \li \cite duret.00.gcse
+ \li \cite geraud.00.icpr
+ \li \cite geraud.01.ai
+ \li \cite geraud.08.mpool
+ \li \cite geraud.99.gretsi
+ \li \cite lazzara.11.icdar
+ \li \cite levillain.09.ismm
+ \li \cite levillain.10.icip
+ \li \cite levillain.10.wadgmm
+ \li \cite levillain.11.gretsi
+
+ \section olnpublis Olena Publications
+
+ \subsection olnpublisjournals In Journals
+
+ \subsection olnpublisconfs In Conferences
+ \htmlinclude lrde_olena.html
+
+ \subsection olnpublisposters Posters
+
+ \subsection olnpublistechreps Student Technical Reports
+
+ */
diff --git a/doc/bibliography.hh b/doc/bibliography.hh
deleted file mode 100644
index 841372b..0000000
--- a/doc/bibliography.hh
+++ /dev/null
@@ -1,19 +0,0 @@
-/** \page bibliography Bibliography
-
- Here you can find all the articles related to the Olena platform.
-
- \li \cite burrus.03.mpool
- \li \cite darbon.02.ismm
- \li \cite darbon.04.ecoopphd
- \li \cite duret.00.gcse
- \li \cite geraud.00.icpr
- \li \cite geraud.01.ai
- \li \cite geraud.08.mpool
- \li \cite geraud.99.gretsi
- \li \cite lazzara.11.icdar
- \li \cite levillain.09.ismm
- \li \cite levillain.10.icip
- \li \cite levillain.10.wadgmm
- \li \cite levillain.11.gretsi
-
- */
diff --git a/doc/contributors.dox b/doc/contributors.dox
new file mode 100644
index 0000000..82b35c6
--- /dev/null
+++ b/doc/contributors.dox
@@ -0,0 +1,5 @@
+/** \page Contributors List of contributors
+
+ \htmlinclude contributors.html
+
+ */
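Both new pages rely on \htmlinclude pulling in fragments produced by the
rules added to doc/Makefile.am; the REFMAN_deps additions ensure they
exist before Doxygen runs. From a configured build tree, the fragments
can also be built explicitly (a sketch using the targets added above):

    # Build the HTML fragments consumed by \htmlinclude before
    # regenerating the reference manual.
    make -C doc contributors.html lrde_olena.html
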
diff --git a/doc/lrde.bib b/doc/lrde.bib
new file mode 100644
index 0000000..c1d6a03
--- /dev/null
+++ b/doc/lrde.bib
@@ -0,0 +1,3856 @@
+ %% ----------------------- %%
+ %% The LRDE Bibliography. %%
+ %% ----------------------- %%
+
+%% This document lists all the publications by members of the LRDE.
+%% $Id: lrde.bib 4195 2011-10-11 10:08:20Z levill_r $
+%%
+%% See
+%%
+%%  - http://www.lrde.epita.fr
+%%    for more information about the LRDE
+%%
+%%  - http://publis.lrde.epita.fr
+%%    for more information about these publications
+%%
+%%  - http://www.lrde.epita.fr/dload/papers/lrde.bib
+%%    for the most recent version of this BibTeX file
+
+@InProceedings{ angelidis.01.wscg,
+ author = {Alexis Angelidis and Geoffroy Fouquier},
+ title = {Visualization issues in virtual environments: from
+ computer graphics techniques to intentional visualization},
+ booktitle = {Proceedings of the 9th International Conference in Central
+ Europe on Computer Graphics, Visualization and Computer
+ Vision (WSCG)},
+ year = 2001,
+ editor = {V. Skala},
+ volume = 3,
+ pages = {90--98},
+ address = {Plzen, Czech Republic},
+ month = feb,
+ isbn = {80-7082-713-0},
+ project = {Urbi},
+ urllrde = {200102-Wscg},
+ abstract = {Rendering efficiently large virtual environment scenes
+ composed of many elements, dynamic objects, and a highly
+ moving viewpoint is a major issue. This paper focuses on
+ the first of the two viewing stage operations: required
+ elements determination, the second being shading/filtering.
+ We propose a classification, extending the existing
+ computer graphic techniques toward display scalability
+ requirements, that distinguishes two key points: keeping
+ only required elements (culling), and keeping only required
+ details (which includes traditional LODs). The mechanisms
+ needed for display scalability are presented.}
+}
+
+@InProceedings{ baillard.05.adass,
+ author = {Anthony Baillard and Emmanuel Bertin and Yannic Mellier
+ and Henry Joy {McCracken} and {\relax Th}ierry G\'eraud and
+ Roser Pell\'o and Jean-Fran\c{c}ois {LeBorgne} and Pascal
+ Fouqu\'e},
+ title = {Project {EFIGI}: Automatic classification of galaxies},
+ year = 2005,
+ booktitle = {Astronomical Data Analysis Software and Systems XV},
+ volume = 351,
+ pages = {236--239},
+ publisher = {Astronomical Society of the Pacific},
+ series = {Conference},
+ url = {http://www.aspbooks.org/custom/publications/paper/index.phtml?paper_id=3398},
+ editor = {Carlos Gabriel and {\relax Ch}ristophe Arviset and Daniel
+ Ponz and Enrique Solano},
+ isbn = {1-58381-219-9},
+ project = {Image},
+ urllrde = {200512-ADASS},
+ abstract = {We propose an automatic system to classify images of
+ galaxies with varying resolution. Morphologically typing
+ galaxies is a difficult task in particular for distant
+ galaxies convolved by a point-spread function and suffering
+ from a poor signal-to-noise ratio. In the context of the
+ first phase of the project EFIGI (extraction of the
+ idealized shapes of galaxies in imagery), we present the
+ three steps of our software: cleaning, dimensionality
+ reduction and supervised learning. We present preliminary
+ results derived from a subset of 774 galaxies from the
+ Principal Galaxies Catalog and compare them to human
+ classifications made by astronomers. We use g-band images
+ from the Sloan Digital Sky Survey. Finally, we discuss
+ future improvements which we intend to implement before
+ releasing our tool to the community.}
+}
+
+@InProceedings{ baillard.07.gretsi,
+ author = {Anthony Baillard and {\relax Ch}ristophe Berger and
+ Emmanuel Bertin and {\relax Th}ierry G\'eraud and Roland
+ Levillain and Nicolas Widynski},
+ title = {Algorithme de calcul de l'arbre des composantes avec
+ applications \`a la reconnaissance des formes en imagerie
+ satellitaire},
+ booktitle = {Proceedings of the 21st Symposium on Signal and Image
+ Processing (GRETSI)},
+ category = {national},
+ year = 2007,
+ address = {Troyes, France},
+ month = sep,
+ project = {Image},
+ urllrde = {200705-GRETSI},
+ abstract = {In this paper a new algorithm to compute the component
+ tree is presented. As compared to the state-of-the-art,
+ this algorithm does not use excessive memory and is able to
+ work efficiently on images whose values are highly
+ quantized or even with images having floating values. We
+ also describe how it can be applied to astronomical data to
+ identify relevant objects.}
+}
+
+@InProceedings{ bensalem.11.sumo,
+ author = {Ala Eddine Ben{ S}alem and Alexandre Duret-Lutz and
+ Fabrice Kordon},
+ title = {Generalized {B\"u}chi Automata versus Testing Automata for
+ Model Checking},
+ booktitle = {Proceedings of the second International Workshop on
+ Scalable and Usable Model Checking for Petri Net and other
+ models of Concurrency (SUMO'11)},
+ address = {Newcastle, UK},
+ series = {Workshop Proceedings},
+ year = 2011,
+ month = jun,
+ volume = 626,
+ publisher = {CEUR},
+ urllrde = {201106-SUMO},
+ url = {http://ftp.informatik.rwth-aachen.de/Publications/CEUR-WS/Vol-726/}
+}
+
+@InProceedings{ berger.07.icip,
+ author = {{\relax Ch}ristophe Berger and {\relax Th}ierry G\'eraud
+ and Roland Levillain and Nicolas Widynski and Anthony
+ Baillard and Emmanuel Bertin},
+ title = {Effective Component Tree Computation with Application to
+ Pattern Recognition in Astronomical Imaging},
+ booktitle = {Proceedings of the IEEE International Conference on Image
+ Processing (ICIP)},
+ pages = {IV-41--IV-44},
+ volume = 4,
+ year = 2007,
+ address = {San Antonio, TX, USA},
+ month = sep,
+ project = {Image},
+ urllrde = {200705-ICIP},
+ abstract = {In this paper a new algorithm to compute the component
+ tree is presented. As compared to the state of the art,
+ this algorithm does not use excessive memory and is able to
+ work efficiently on images whose values are highly
+ quantized or even with images having floating values. We
+ also describe how it can be applied to astronomical data to
+ identify relevant objects.}
+}
+
+@Article{ bloch.03.ai,
+ author = {Isabelle Bloch and {\relax Th}ierry G\'eraud and Henri
+ Ma\^itre},
+ title = {Representation and fusion of heterogeneous fuzzy
+ information in the {3D} space for model-based structural
+ recognition---application to {3D} brain imaging},
+ journal = {Artificial Intelligence},
+ month = aug,
+ year = 2003,
+ volume = 148,
+ number = {1-2},
+ pages = {141--175},
+ project = {Image},
+ urllrde = {200308-AI},
+ abstract = {We present a novel approach of model-based pattern
+ recognition where structural information and spatial
+ relationships have a most important role. It is illustrated
+ in the domain of 3D brain structure recognition using an
+ anatomical atlas. Our approach performs simultaneously
+ segmentation and recognition of the scene and the solution
+ of the recognition task is progressive, processing
+ successively different objects, using different of
+ knowledge about the object and about relationships between
+ objects. Therefore the core of the approach is the
+ representation part, and constitutes the main contribution
+ of this paper. We make use of a spatial representation of
+ each piece of information, as a spatial set representing a
+ constraint to be satisfied by the searched object, thanks
+ in particular to fuzzy mathematical operations. Fusion of
+ these constraints allows to segment and recognize the
+ desired object.}
+}
+
+@Article{ bloch.05.prl,
+ author = {Isabelle Bloch and Olivier Colliot and Oscar Camara and
+ {\relax Th}ierry G\'eraud},
+ title = {Fusion of spatial relationships for guiding recognition,
+ example of brain structure recognition in {3D} {MRI}},
+ journal = {Pattern Recognition Letters},
+ year = 2005,
+ volume = 26,
+ number = 4,
+ month = mar,
+ pages = {449--457},
+ project = {Image},
+ urllrde = {200407-PRL},
+ abstract = {Spatial relations play an important role in recognition of
+ structures embedded in a complex environment and for
+ reasoning under imprecision. Several types of relationships
+ can be modeled in a unified way using fuzzy mathematical
+ morphology. Their combination benefits from the powerful
+ framework of fuzzy set theory for fusion tasks and decision
+ making. This paper presents several methods of fusion of
+ information about spatial relationships and illustrates
+ them on the example of model-based recognition of brain
+ structures in 3D magnetic resonance imaging.}
+}
+
+@Article{ borghi.06.crossroads,
+ author = {Alexandre Borghi and Valentin David and Akim Demaille},
+ title = {{C}-{T}ransformers --- {A} Framework to Write {C} Program
+ Transformations},
+ journal = {ACM Crossroads},
+ year = 2006,
+ volume = 12,
+ number = 3,
+ month = {Spring},
+ project = {Transformers},
+ note = {\url{http://www.acm.org/crossroads/xrds12-3/contractc.html}},
+ urllrde = {200510-Crossroads},
+ abstract = {Program transformation techniques have reached a maturity
+ level that allows processing high-level language sources in
+ new ways. Not only do they revolutionize the implementation
+ of compilers and interpreters, but with modularity as a
+ design philosophy, they also permit the seamless extension
+ of the syntax and semantics of existing programming
+ languages. The C-Transformers project provides a
+ transformation environment for C, a language that proves to
+ be hard to transform. We demonstrate the effectiveness of
+ C-Transformers by extending C's instructions and control
+ flow to support Design by Contract. C-Transformers is
+ developed by members of the LRDE: EPITA undergraduate
+ students.}
+}
+
+@InProceedings{ burrus.03.mpool,
+ author = {Nicolas Burrus and Alexandre Duret-Lutz and {\relax
+ Th}ierry G\'eraud and David Lesage and Rapha\"el Poss},
+ title = {A static {C++} object-oriented programming ({SCOOP})
+ paradigm mixing benefits of traditional {OOP} and generic
+ programming},
+ booktitle = {Proceedings of the Workshop on Multiple Paradigm with
+ Object-Oriented Languages (MPOOL)},
+ year = 2003,
+ address = {Anaheim, CA, USA},
+ month = oct,
+ project = {Olena},
+ urllrde = {200310-MPOOL},
+ abstract = {Object-oriented and generic programming are both supported
+ in C++. OOP provides high expressiveness whereas GP leads
+ to more efficient programs by avoiding dynamic typing. This
+ paper presents SCOOP, a new paradigm which enables both
+ classical OO design and high performance in C++ by mixing
+ OOP and GP. We show how classical and advanced OO features
+ such as virtual methods, multiple inheritance, argument
+ covariance, virtual types and multimethods can be
+ implemented in a fully statically typed model, hence
+ without run-time overhead.}
+}
+
+@InProceedings{ cadilhac.06.avocs,
+ author = {Micha\"el Cadilhac and {\relax Th}omas H\'erault and
+ Richard Lassaigne and Sylvain Peyronnet and Sebastien
+ Tixeuil},
+ title = {Evaluating complex {MAC} protocols for sensor networks
+ with {APMC}},
+ booktitle = {Proceedings of the 6th International Workshop on Automated
+ Verification of Critical Systems (AVoCS)},
+ year = 2006,
+ series = {Electronic Notes in Theoretical Computer Science Series},
+ pages = {33--46},
+ volume = 185,
+ project = {APMC},
+ urllrde = {200606-Avocs},
+ abstract = {In this paper we present an analysis of a MAC (Medium
+ Access Control) protocol for wireless sensor networks. The
+ purpose of this protocol is to manage wireless media access
+ by constructing a Time Division Media Access (TDMA)
+ schedule. APMC (Approximate Probabilistic Model Checker) is
+ a tool that uses approximation-based verification
+ techniques in order to analyse the behavior of complex
+ probabilistic systems. Using APMC, we approximately
+ computed the probabilities of several properties of the MAC
+ protocol being studied, thus giving some insights about its
+ performance.}
+}
+
+@InProceedings{ carlier.02.itrs,
+ author = {S\'ebastien Carlier},
+ title = {Polar type inference with intersection types and $\omega$},
+ booktitle = {Proceedings of the 2nd Workshop on Intersection Types and
+ Related Systems (ITRS), published in: Electronic Notes in
+ Theoretical Computer Science},
+ volume = 70,
+ issue = 1,
+ publisher = {Elsevier},
+ year = 2002,
+ address = {Copenhagen, Denmark},
+ month = jul,
+ project = {Software},
+ urllrde = {200207-ITRS},
+ abstract = {We present a type system featuring intersection types and
+ omega, a type constant which is assigned to unused terms.
+ We exploit and extend the technology of expansion variables
+ from the recently developed System I, with which we believe
+ our system shares many interesting properties, such as
+ strong normalization, principal typings, and compositional
+ analysis. Our presentation emphasizes a polarity discipline
+ and shows its benefits. We syntactically distinguish
+ positive and negative types, and give them different
+ interpretations. We take the point of view that the
+ interpretation of a type is intrinsic to it, and should not
+ change implicitly when it appears at the opposite polarity.
+ Our system is the result of a process which started with an
+ extension of Trevor Jim's Polar Type System.}
+}
+
+@InProceedings{ chekroun.06.iciar,
+ author = {Mickael Chekroun and J\'er\^ome Darbon and Igor Ciril},
+ title = {On a Polynomial Vector Field Model for Shape
+ Representation},
+ booktitle = {Proceedings of the International Conference on Image
+ Analysis and Recognition (ICIAR)},
+ publisher = {Springer-Verlag},
+ year = 2006,
+ address = {Povoa de Varzim, Portugal},
+ month = sep,
+ project = {Image},
+ urllrde = {200609-ICIAR},
+ abstract = {In this paper we propose an efficient algorithm to perform
+ a polynomial approximation of the vector field derived from
+ the usual distance mapping method. The main ingredients
+ consist of minimizing a quadratic functional and
+ transforming this problem in an appropriate setting for
+ implementation. With this approach, we reduce the problem
+ of obtaining an approximating polynomial vector field to
+ the resolution of a not expansive linear algebraic system.
+ By this procedure, we obtain an analytical shape
+ representation that relies only on some coefficients.
+ Fidelity and numerical efficiency of our approach are
+ presented on illustrative examples.}
+}
+
+@InProceedings{ claveirole.05.ciaa,
+ author = {{\relax Th}omas Claveirole and Sylvain Lombardy and Sarah
+ O'Connor and Louis-No\"el Pouchet and Jacques Sakarovitch},
+ title = {Inside {V}aucanson},
+ booktitle = {Proceedings of Implementation and Application of Automata,
+ 10th International Conference (CIAA)},
+ year = 2005,
+ pages = {117--128},
+ editor = {Springer-Verlag},
+ volume = 3845,
+ series = {Lecture Notes in Computer Science Series},
+ address = {Sophia Antipolis, France},
+ month = jun,
+ urllrde = {200506-Ciaa},
+ project = {Vaucanson},
+ abstract = {This paper presents some features of the Vaucanson
+ platform. We describe some original algorithms on weighted
+ automata and transducers (computation of the quotient,
+ conversion of a regular expression into a weighted
+ automaton, and composition). We explain how complex
+ declarations due to the generic programming are masked from
+ the user and finally we present a proposal for an XML
+ format that allows implicit descriptions for simple types
+ of automata.}
+}
+
+@TechReport{ clouard.99.tr,
+ author = {R\'egis Clouard and Abderrahim Elmoataz and Fran\c{c}ois
+ Angot and Olivier Lezoray and Alexandre Duret-Lutz},
+ title = {Une biblioth\`eque et un environnement de programmation
+ d'op\'erateurs de traitement d'images},
+ institution = {GREYC-ISMRA},
+ year = 1999,
+ number = 99008,
+ address = {Caen, France},
+ month = nov,
+ url = {http://www.greyc.ismra.fr/~regis/Pandore/},
+ project = {Olena},
+ urllrde = {199911-TR}
+}
+
+@InProceedings{ darbon.01.ei,
+ author = {J\'er\^ome Darbon and Bulent Sankur and Henri Ma\^{\i}tre},
+ title = {Error correcting code performance for watermark
+ protection},
+ booktitle = {Proceedings of the 13th Symposium SPIE on Electronic
+ Imaging----Security and Watermarking of Multimedia Contents
+ III (EI27)},
+ year = 2001,
+ address = {San Jose, CA, USA},
+ month = jan,
+ volume = 4314,
+ editors = {P.W. Wong and E.J. Delp III},
+ pages = {663--672},
+ project = {Image},
+ urllrde = {200101-Ei},
+ abstract = {The watermark signals are weakly inserted in images due to
+ imperceptibility constraints which makes them prone to
+ errors in the extraction stage. Although the error
+ correcting codes can potentially improve their performance
+ one must pay attention to the fact that the watermarking
+ channel is in general very noisy. We have considered the
+ trade-off of the BCH codes and repetition codes in various
+ concatenation modes. At the higher rates that can be
+ encountered in watermarking channels such as due to
+ low-quality JPEG compression, codes like the BCH codes
+ cease being useful. Repetition coding seems to be the last
+ resort at these error rates of 25\% and beyond. It has been
+ observed that there is a zone of bit error rate where their
+ concatenation turns out to be more useful. In fact the
+ concatenation of repetition and BCH codes judiciously
+ dimensioned, given the available number of insertion sites
+ and the payload size, achieves a higher reliability level.}
+}
+
+@InProceedings{ darbon.02.ismm,
+ author = {J\'er\^ome Darbon and {\relax Th}ierry G\'eraud and
+ Alexandre Duret-Lutz},
+ title = {Generic implementation of morphological image operators},
+ booktitle = {Mathematical Morphology, Proceedings of the 6th
+ International Symposium (ISMM)},
+ pages = {175--184},
+ year = 2002,
+ address = {Sydney, Australia},
+ month = apr,
+ publisher = {CSIRO Publishing},
+ project = {Olena},
+ urllrde = {200204-Ismm},
+ abstract = {Several libraries dedicated to mathematical morphology
+ exist. But they lack genericity, that is to say, the
+ ability for operators to accept input of different natures
+ ---2D binary images, graphs enclosing floating values, etc.
+ We describe solutions which are integrated in Olena, a
+ library providing morphological operators. We demonstrate
+ with some examples that translating mathematical formulas
+ and algorithms into source code is made easy and safe with
+ Olena. Moreover, experimental results show that no extra
+ costs at run-time are induced.}
+}
+
+@InProceedings{ darbon.04.ecoopphd,
+ author = {J\'er\^ome Darbon and {\relax Th}ierry G\'eraud and
+ Patrick Bellot},
+ title = {Generic algorithmic blocks dedicated to image processing},
+ booktitle = {Proceedings of the ECOOP Workshop for PhD Students},
+ year = 2004,
+ address = {Oslo, Norway},
+ month = jun,
+ project = {Olena},
+ urllrde = {200406-ECOOPPHD},
+ abstract = {This paper deals with the implementation of algorithms in
+ the specific domain of image processing. Although many
+ image processing libraries are available, they generally
+ lack genericity and flexibility. Many image processing
+ algorithms can be expressed as compositions of elementary
+ algorithmic operations referred to as blocks. Implementing
+ these compositions is achieved using generic programming.
+ Our solution is compared to previous ones and we
+ demonstrate it on a class of image processing algorithms.}
+}
+
+@InProceedings{ darbon.04.iwcia,
+ author = {J\'er\^ome Darbon and Marc Sigelle},
+ title = {Exact optimization of discrete constrained total variation
+ minimization problems},
+ booktitle = {Proceedings of the 10th International Workshop on
+ Combinatorial Image Analysis (IWCIA)},
+ year = 2004,
+ address = {Auckland, New Zealand},
+ month = dec,
+ pages = {548--557},
+ editors = {R. Klette and J. Zunic},
+ series = {Lecture Notes in Computer Science Series},
+ publisher = {Springer-Verlag},
+ volume = 3322,
+ project = {Image},
+ urllrde = {200412-IWCIA},
+ abstract = {This paper deals with the total variation minimization
+ problem when the fidelity is either the $L^2$-norm or the
+ $L^1$-norm. We propose an algorithm which computes the
+ exact solution of these two problems after discretization.
+ Our method relies on the decomposition of an image into its
+ level sets. It maps the original problems into independent
+ binary Markov Random Field optimization problems associated
+ with each level set. Exact solutions of these binary
+ problems are found thanks to minimum-cut techniques. We
+ prove that these binary solutions are increasing and thus
+ allow to reconstruct the solution of the original
+ problems.}
+}
+
+@TechReport{ darbon.04.tr,
+ author = {J\'er\^ome Darbon and Marc Sigelle},
+ title = {Exact optimization of discrete constrained total variation
+ minimization problems},
+ institution = {ENST},
+ year = 2004,
+ number = {2004C004},
+ address = {Paris, France},
+ month = oct,
+ project = {Image},
+ annote = {This technical report corresponds to the publication
+ darbon.04.iwcia. ; 200412-IWCIA},
+ urllrde = {200410-TR},
+ abstract = {This paper deals with the total variation minimization
+ problem when the fidelity is either the $L^2$-norm or the
+ $L^1$-norm. We propose an algorithm which computes the
+ exact solution of these two problems after discretization.
+ Our method relies on the decomposition of an image into its
+ level sets. It maps the original problems into independent
+ binary Markov Random Field optimization problems associated
+ with each level set. Exact solutions of these binary
+ problems are found thanks to minimum-cut techniques. We
+ prove that these binary solutions are increasing and thus
+ allow to reconstruct the solution of the original
+ problems.}
+}
+
+@InProceedings{ darbon.05.eusipco,
+ author = {J\'er\^ome Darbon and Ceyhun Burak Akg\"ul},
+ title = {An efficient algorithm for attribute openings and
+ closings},
+ booktitle = {Proceedings of the 13th European Signal Processing
+ Conference (EUSIPCO)},
+ year = 2005,
+ address = {Antalya, Turkey},
+ month = sep,
+ project = {Image},
+ urllrde = {200509-Eusipco},
+ abstract = {In this paper, we present fast algorithms for area opening
+ and closing on grayscale images. Salembier's max-tree based
+ algorithm is one of the well known methods to perform area
+ opening. It makes use of a special representation where
+ each node in the tree stands for a flat region and the tree
+ itself is oriented towards the maxima of the grayscale
+ image. Pruning the tree with respect to some attribute,
+ e.g., the area, boils down to attribute opening. Following
+ the same approach, we propose an algorithm for area opening
+ (closing) without building the max-tree (min-tree). Our
+ algorithm exhibit considerable performance compared to the
+ state-of-the art in this domain.}
+}
+
+@InProceedings{ darbon.05.ibpria,
+ author = {J\'er\^ome Darbon and Marc Sigelle},
+ title = {A Fast and Exact Algorithm for Total Variation
+ Minimization},
+ booktitle = {Proceedings of the 2nd Iberian Conference on Pattern
+ Recognition and Image Analysis (IbPRIA)},
+ publisher = {Springer-Verlag},
+ volume = 3522,
+ pages = {351--359},
+ year = 2005,
+ address = {Estoril, Portugal},
+ month = jun,
+ project = {Image},
+ urllrde = {200506-IbPria},
+ abstract = {This paper deals with the minimization of the total
+ variation under a convex data fidelity term. We propose an
+ algorithm which computes an exact minimizer of this
+ problem. The method relies on the decomposition of an image
+ into its level sets. Using these level sets, we map the
+ problem into optimizations of independent binary Markov
+ Random Fields. Binary solutions are found thanks to
+ graph-cut techniques and we show how to derive a fast
+ algorithm. We also study the special case when the fidelity
+ term is the $L^1$-norm. Finally we provide some
+ experiments.}
+}
+
+@InProceedings{ darbon.05.ispa,
+ author = {J\'er\^ome Darbon},
+ title = {Total Variation Minimization with $L^1$ Data Fidelity as a
+ Contrast Invariant Filter},
+ booktitle = {Proceedings of the 4th International Symposium on Image
+ and Signal Processing and Analysis (ISPA 2005)},
+ year = 2005,
+ address = {Zagreb, Croatia},
+ month = sep,
+ pages = {221--226},
+ project = {Image},
+ urllrde = {200509-Ispa},
+ abstract = {This paper sheds new light on minimization of the total
+ variation under the $L^1$-norm as data fidelity term
+ ($L^1+TV$) and its link with mathematical morphology. It is
+ well known that morphological filters enjoy the property of
+ being invariant with respect to any change of contrast.
+ First, we show that minimization of $L^1+TV$ yields a
+ self-dual and contrast invariant filter. Then, we further
+ constrain the minimization process by only optimizing the
+ grey levels of level sets of the image while keeping their
+ boundaries fixed. This new constraint is maintained thanks
+ to the Fast Level Set Transform which yields a complete
+ representation of the image as a tree. We show that this
+ filter can be expressed as a Markov Random Field on this
+ tree. Finally, we present some results which demonstrate
+ that these new filters can be particularly useful as a
+ preprocessing stage before segmentation.}
+}
+
+@InProceedings{ darbon.05.isvc,
+ author = {J\'er\^ome Darbon and Sylvain Peyronnet},
+ title = {A Vectorial Self-Dual Morphological Filter based on Total
+ Variation Minimization},
+ booktitle = {Proceedings of the First International Conference on
+ Visual Computing},
+ year = 2005,
+ address = {Lake Tahoe, Nevada, USA},
+ month = dec,
+ project = {Image},
+ pages = {388--395},
+ series = {Lecture Notes in Computer Science Series},
+ publisher = {Springer-Verlag},
+ volume = 3804,
+ urllrde = {200512-Isvc},
+ abstract = {We present a vectorial self-dual morphological filter.
+ Contrary to many methods, our approach does not require the
+ use of an ordering on vectors. It relies on the
+ minimization of the total variation with $L^1$ norm as data
+ fidelity on each channel. We further constraint this
+ minimization in order not to create new values. It is shown
+ that this minimization yields a self-dual and contrast
+ invariant filter. Although the above minimization is not a
+ convex problem, we propose an algorithm which computes a
+ global minimizer. This algorithm relies on minimum cost
+ cut-based optimizations.}
+}
+
+@PhDThesis{ darbon.05.phd,
+ author = {J\'er\^ome Darbon},
+ title = {Composants logiciels et algorithmes de minimisation exacte
+ d'\'energies d\'edi\'ees au traitement d'images},
+ school = {\'Ecole Nationale Sup\'erieure des T\'el\'ecommunications
+ de Paris (ENST)},
+ address = {Paris, France},
+ year = 2005,
+ month = oct,
+ number = {XXX},
+ note = {In French},
+ urllrde = {200510-PhD},
+ abstract = {Dans cette th\`ese nous \'etudions la minimisation
+ d'\'energies markoviennes rencontr\'ees dans les domaines
+ du traitement des images et de la vision par ordinateur.
+ Nous proposons des algorithmes de minimisation exacte pour
+ diff\'erents types d'\'energies. Ces algorithmes ont
+ l'int\'er\^et de fournir un minimum global quand bien
+ m\^eme l'\'energie n'est pas convexe. Enfin, nous mettons
+ en \'evidence quelques liens entre les champs de Markov
+ binaires et la morphologie math\'ematique. La version
+ finale de ce manuscrit suit les recommandations des rapporteurs.}
+}
+
+@TechReport{ darbon.05.tr,
+ author = {J\'er\^ome Darbon and Marc Sigelle},
+ title = {A fast and exact algorithm for total variation
+ minimization},
+ institution = {ENST},
+ year = 2005,
+ number = {2005D002},
+ address = {Paris, France},
+ month = jan,
+ project = {Image},
+ annote = {This technical report corresponds to the publication
+ darbon.05.ibpria.},
+ urllrde = {200501-TR},
+ abstract = {This paper deals with the minimization of the total
+ variation under a convex data fidelity term. We propose an
+ algorithm which computes an exact minimizer of this
+ problem. The method relies on the decomposition of an image
+ into its level sets. Using these level sets, we map the
+ problem into optimizations of independent binary Markov
+ Random Fields. Binary solutions are found thanks to
+ graph-cut techniques and we show how to derive a fast
+ algorithm. We also study the special case when the fidelity
+ term is the $L^1$-norm. Finally we provide some
+ experiments.}
+}
+
+@InProceedings{ darbon.06.iccp,
+ author = {J\'er\^ome Darbon and Richard Lassaigne and Sylvain
+ Peyronnet},
+ title = {Approximate Probabilistic Model Checking for Programs},
+ booktitle = {Proceedings of the {IEEE} 2nd International Conference on
+ Intelligent Computer Communication and Processing
+ ({ICCP'06)}},
+ year = 2006,
+ address = {Technical University of Cluj-Napoca, Romania},
+ month = sep,
+ urllrde = {200606-Iccp2},
+ abstract = {In this paper we deal with the problem of applying model
+ checking to real programs. We verify a program without
+ constructing the whole transition system using a technique
+ based on Monte-Carlo sampling, also called ``approximate
+ model checking''. This technique combines model checking
+ and randomized approximation. Thus, it avoids the so called
+ state space explosion phenomenon. We propose a prototype
+ implementation that works directly on C source code. It
+ means that, contrary to other approaches, we do not need
+ to use a specific language nor specific data structures in
+ order to describe the system we wish to verify. Finally, we
+ present experimental results that show the effectiveness of
+ the approach applied to finding bugs in real programs.}
+}
+
+@Article{ darbon.06.jmiv,
+ author = {J\'er\^ome Darbon and Marc Sigelle},
+ title = {Image restoration with discrete constrained {T}otal
+ {Variation}---Part~{I}: Fast and exact optimization},
+ journal = {Journal of Mathematical Imaging and Vision},
+ year = 2006,
+ volume = 26,
+ number = 3,
+ month = dec,
+ pages = {261--276},
+ project = {Image},
+ urllrde = {2006XXX-JMIVa},
+ abstract = {This paper deals with the total variation minimization
+ problem in image restoration for convex data fidelity
+ functionals. We propose a new and fast algorithm which
+ computes an exact solution in the discrete framework. Our
+ method relies on the decomposition of an image into its
+ level sets. It maps the original problems into independent
+ binary Markov Random Field optimization problems at each
+ level. Exact solutions of these binary problems are found
+ thanks to minimum cost cut techniques in graphs. These
+ binary solutions are proved to be monotone increasing with
+ levels and yield thus an exact solution of the discrete
+ original problem. Furthermore we show that minimization of
+ total variation under $L^1$ data fidelity term yields a
+ self-dual contrast invariant filter. Finally we present
+ some results.}
+}
+
+@Article{ darbon.06.jmivb,
+ author = {J\'er\^ome Darbon and Marc Sigelle},
+ title = {Image restoration with discrete constrained {T}otal
+ {Variation}---Part~{II}: Levelable functions, convex priors
+ and non-convex case},
+ journal = {Journal of Mathematical Imaging and Vision},
+ year = 2006,
+ volume = 26,
+ number = 3,
+ month = dec,
+ pages = {277--291},
+ project = {Image},
+ urllrde = {2006XXX-JMIVb},
+ abstract = {In Part II of this paper we extend the results obtained in
+ Part I for total variation minimization in image
+ restoration towards the following directions: first we
+ investigate the decomposability property of energies on
+ levels, which leads us to introduce the concept of
+ levelable regularization functions (which TV is the
+ paradigm of). We show that convex levelable posterior
+ energies can be minimized exactly using the
+ level-independent cut optimization scheme seen in Part I.
+ Next we extend this graph cut optimization scheme to
+ the case of non-convex levelable energies. We present
+ convincing restoration results for images corrupted with
+ impulsive noise. We also provide a minimum-cost based
+ algorithm which computes a global minimizer for Markov
+ Random Field with convex priors. Last we show that
+ non-levelable models with convex local conditional
+ posterior energies such as the class of generalized
+ gaussian models can be exactly minimized with a generalized
+ coupled Simulated Annealing.}
+}
+
+@InProceedings{ darbon.06.siam,
+ author = {J\'er\^ome Darbon and Marc Sigelle},
+ title = {Fast and Exact Discrete Image Restoration Based on Total
+ Variation and on Its Extensions to Levelable Potentials},
+ booktitle = {SIAM Conference on Imaging Sciences},
+ year = 2006,
+ address = {Minneapolis, USA},
+ month = may,
+ project = {Image},
+ urllrde = {200605-SIAM},
+ abstract = {We investigate the decomposition property of posterior
+ restoration energies on level sets in a discrete Markov
+ Random Field framework. This leads us to the concept of
+ 'levelable' potentials (which TV is shown to be the
+ paradigm of). We prove that convex levelable posterior
+ energies can be minimized exactly with level-independent
+ binary graph cuts. We extend this scheme to the case of
+ non-convex levelable energies, and present convincing
+ restoration results for images degraded by impulsive
+ noise.}
+}
+
+@TechReport{ darbon.06.tr,
+ author = {J\'er\^ome Darbon and Marc Sigelle and Florence Tupin},
+ title = {A note on nice-levelable {MRFs} for {SAR} image denoising
+ with contrast preservation},
+ institution = {Signal and Image Processing Group, Ecole Nationale
+ Sup\'erieure des T\'el\'ecommunications},
+ year = 2006,
+ number = {2006D006},
+ address = {Paris, France},
+ month = sep,
+ project = {Image},
+ annote = {On this technical report is based the publication
+ darbon.07.ei ; 200701-SPIE},
+ urllrde = {200701-SPIE}
+}
+
+@InProceedings{ darbon.07.ei,
+ author = {J\'er\^ome Darbon and Marc Sigelle and Florence Tupin},
+ title = {The use of levelable regularization functions for {MRF}
+ restoration of {SAR} images},
+ booktitle = {Proceedings of the 19th Symposium SPIE on Electronic
+ Imaging},
+ year = 2007,
+ address = {San Jose, CA, USA},
+ month = jan,
+ project = {Image},
+ urllrde = {200701-SPIE},
+ abstract = {It is well-known that Total Variation (TV) minimization
+ with L2 data fidelity terms (which corresponds to white
+ Gaussian additive noise) yields a restored image which
+ presents some loss of contrast. The same behavior occurs
+ for TVmodels with non-convex data fidelity terms that
+ represent speckle noise. In this note we propose a new
+ approach to cope with the restoration of Synthetic Aperture
+ Radar images while preserving the contrast.}
+}
+
+@InProceedings{ darbon.07.mirage,
+ author = {J\'er\^ome Darbon},
+ title = {A Note on the Discrete Binary {Mumford-Shah} Model},
+ booktitle = {Proceedings of the international Computer Vision /
+ Computer Graphics Collaboration Techniques and Applications
+ (MIRAGE 2007)},
+ year = 2007,
+ address = {Paris, France},
+ month = mar,
+ project = {Image},
+ urllrde = {200703-MIRAGE},
+ abstract = {This paper concerns itself with the analysis of the
+ two-phase Mumford-Shah model also known as the active
+ contour without edges model introduced by Chan and Vese. It
+ consists of approximating an observed image by a piecewise
+ constant image which can take only two values. First we
+ show that this model with the $L^1$-norm as data fidelity
+ yields a contrast invariant filter which is a well known
+ property of morphological filters. Then we consider a
+ discrete version of the original problem. We show that an
+ inclusion property holds for the minimizers. The latter is
+ used to design an efficient graph-cut based algorithm which
+ computes an exact minimizer. Some preliminary results are
+ presented.}
+}
+
+@InProceedings{ darbon.08.iwcia,
+ author = {J\'er\^ome Darbon},
+ title = {Global Optimization for First Order {Markov} Random Fields
+ with Submodular Priors},
+ booktitle = {Proceedings of the twelfth International Workshop on
+ Combinatorial Image Analysis (IWCIA'08) },
+ year = 2008,
+ address = {Buffalo, New York, USA},
+ month = apr,
+ project = {Image},
+ urllrde = {200801-IWCIA},
+ abstract = {This paper copes with the optimization of Markov Random
+ Fields with pairwise interactions defined on arbitrary
+ graphs. The set of labels is assumed to be linearly ordered
+ and the priors are supposed to be submodular. Under these
+ assumptions we propose an algorithm which computes an exact
+ minimizer of the Markovian energy. Our approach relies on
+ mapping the original problem into a combinatorial one which
+ involves only binary variables. The latter is shown to be
+ exactly solvable via computing a maximum flow. The
+ restatement into a binary combinatorial problem is done by
+ considering the level-sets of the labels instead of the
+ label values themselves. The submodularity of the priors is
+ shown to be a necessary and sufficient condition for the
+ applicability of the proposed approach.}
+}
+
+@Misc{ david.05.sud,
+ author = {Valentin David and Akim Demaille and Renaud Durlin and
+ Olivier Gournet},
+ title = {{C}/{C++} Disambiguation Using Attribute Grammars},
+ year = 2005,
+ note = {Communication to Stratego Users Day 2005},
+ address = {Utrecht {U}niversity, {N}etherland},
+ month = may,
+ project = {Transformers},
+ urllrde = {200505-SUD-disamb},
+ abstract = {We propose a novel approach to semantics driven
+ disambiguation based on Attribute Grammars (AGs). AGs share
+ the same modularity model as their host grammar language,
+ here the Syntax Definition Formalism (SDF), which makes them
+ particularly attractive for working on unstable grammars,
+ or grammar extensions. The framework we propose is
+ effective, since a full ISO-C99 disambiguation chain
+ already works, and the core of the hardest ambiguities of
+ C++ is solved. This requires specific techniques, and some
+ extensions to the stock AG model.}
+}
+
+@InProceedings{ david.06.iccp,
+ author = {Valentin David and Akim Demaille and Olivier Gournet},
+ title = {Attribute Grammars for Modular Disambiguation},
+ booktitle = {Proceedings of the {IEEE} 2nd International Conference on
+ Intelligent Computer Communication and Processing
+ ({ICCP'06)}},
+ year = 2006,
+ address = {Technical University of Cluj-Napoca, Romania},
+ month = sep,
+ urllrde = {200606-ICCP},
+ abstract = {To face the challenges of tomorrow's software engineering
+ tools, powerful language-generic program-transformation
+ components are needed. We propose the use of attribute
+ grammars (AGs) to generate language specific disambiguation
+ filters. In this paper, a complete implementation of a
+ language-independent AGs system is presented. As a full
+ scale experiment, we present an implementation of a
+ flexible C front-end. Its specifications are concise,
+ modular, and the result is efficient. On top of it,
+ transformations such as software renovation, code metrics,
+ domain specific language embedding can be implemented.},
+ project = {Transformers}
+}
+
+@Article{ dehak.05.pami,
+ author = {R\'eda Dehak and Isabelle Bloch and Henri Ma{\^\i}tre},
+ title = {Spatial reasoning with relative incomplete information on
+ relative positioning},
+ journal = {IEEE Transactions on Pattern Analysis and Machine
+ Intelligence},
+ year = 2005,
+ pages = {1473--1484},
+ volume = 27,
+ month = sep,
+ number = 9,
+ project = {Image},
+ urllrde = {200509-PAMI},
+ abstract = {This paper describes a probabilistic method of inferring
+ the position of a point with respect to a reference point
+ knowing their relative spatial position to a third point.
+ We address this problem in the case of incomplete
+ information where only the angular spatial relationships
+ are known. The use of probabilistic representations allows
+ us to model prior knowledge. We derive exact formulae
+ expressing the conditional probability of the position
+ given the two known angles, in typical cases: uniform or
+ Gaussian random prior distributions within rectangular or
+ circular regions. This result is illustrated with respect
+ to two different simulations: The first is devoted to the
+ localization of a mobile phone using only angular
+ relationships, the second, to geopositioning within a city.
+ This last example uses angular relationships and some
+ additional knowledge about the position.}
+}
+
+@InProceedings{ dehak.06.nist,
+ author = {R\'eda Dehak and Charles-Alban Deledalle and Najim Dehak},
+ title = {{LRDE} System description},
+ booktitle = {NIST SRE'06 Workshop: speaker recognition evaluation
+ campaign},
+ year = 2006,
+ address = {San Juan, Puerto Rico},
+ month = jun,
+ urllrde = {200606-NIST-A}
+}
+
+@InProceedings{ dehak.07.interspeech,
+ author = {R\'eda Dehak and Najim Dehak and Patrick Kenny and Pierre
+ Dumouchel},
+ title = {Linear and Non Linear Kernel {GMM} SuperVector Machines
+ for Speaker Verification},
+ booktitle = {Proceedings of the European Conference on Speech
+ Communication and Technologies (Interspeech'07)},
+ year = 2007,
+ address = {Antwerp, Belgium},
+ month = aug,
+ urllrde = {200708-INTERSPEECH},
+ abstract = {This paper presents a comparison between Support Vector
+ Machines (SVM) speaker verification systems based on linear
+ and non linear kernels defined in GMM supervector space. We
+ describe how these kernel functions are related and we show
+ how the nuisance attribute projection (NAP) technique can
+ be used with both of these kernels to deal with the session
+ variability problem. We demonstrate the importance of GMM
+ model normalization (M-Norm) especially for the non linear
+ kernel. All our experiments were performed on the core
+ condition of NIST 2006 speaker recognition evaluation (all
+ trials). Our best results (an equal error rate of 6.3\%)
+ were obtained using NAP and GMM model normalization with
+ the non linear kernel.}
+}
+
+@InProceedings{ dehak.08.nist,
+ author = {R\'eda Dehak and Najim Dehak and Patrick Kenny},
+ title = {The {LRDE} Systems for the 2008 {NIST} Speaker Recognition
+ Evaluation},
+ booktitle = {NIST-SRE 2008},
+ year = 2008,
+ address = {Montr\'eal, Canada},
+ month = jun
+}
+
+@InProceedings{ dehak.08.odysseya,
+ author = {R\'eda Dehak and Najim Dehak and Patrick Kenny and Pierre
+ Dumouchel},
+ title = {Kernel Combination for {SVM} Speaker Verification},
+ booktitle = {Proceedings of the Speaker and Language Recognition
+ Workshop (IEEE-Odyssey 2008)},
+ year = 2008,
+ address = {Stellenbosch, South Africa},
+ month = jan,
+ urllrde = {200709-ODYSSEY-A},
+ abstract = {We present a new approach for constructing the kernels
+ used to build support vector machines for speaker
+ verification. The idea is to construct new kernels by
+ taking linear combination of many kernels such as the GLDS
+ and GMM supervector kernels. In this new kernel
+ combination, the combination weights are speaker dependent
+ rather than universal weights on score level fusion and
+ there is no need for extra-data to estimate them. An
+ experiment on the NIST 2006 speaker recognition evaluation
+ dataset (all trials) was done using three different kernel
+ functions (GLDS kernel, linear and Gaussian GMM supervector
+ kernels). We compared our kernel combination to the optimal
+ linear score fusion obtained using logistic regression.
+ This optimal score fusion was trained on the same test
+ data. We had an equal error rate of $\simeq 5.9\%$ using
+ the kernel combination technique, which is better than the
+ optimal score fusion system ($\simeq 6.0\%$).}
+}
+
+@InProceedings{ dehak.08.odysseyb,
+ author = {Najim Dehak and R\'eda Dehak and Patrick Kenny and Pierre
+ Dumouchel},
+ title = {Comparison Between Factor Analysis and {GMM} Support
+ Vector Machines for Speaker Verification},
+ booktitle = {Proceedings of the Speaker and Language Recognition
+ Workshop (IEEE-Odyssey 2008)},
+ year = 2008,
+ address = {Stellenbosch, South Africa},
+ month = jan,
+ urllrde = {200709-ODYSSEY-B},
+ abstract = {We present a comparison between speaker verification
+ systems based on factor analysis modeling and support
+ vector machines using GMM supervectors as features. All
+ systems used the same acoustic features and they were
+ trained and tested on the same data sets. We test two types
+ of kernel (one linear, the other non-linear) for the GMM
+ support vector machines. The results show that factor
+ analysis using speaker factors gives the best results on
+ the core condition of the NIST 2006 speaker recognition
+ evaluation. The difference is particularly marked on the
+ English language subset. Fusion of all systems gave an
+ equal error rate of 4.2\% (all trials) and 3.2\% (English
+ trials only).}
+}
+
+@InProceedings{ dehak.09.icassp,
+ author = {Najim Dehak and Patrick Kenny and R\'eda Dehak and Ondrej
+ Glembek and Pierre Dumouchel and Lukas Burget and
+ Valiantsina Hubeika and Fabio Castaldo},
+ title = {Support Vector Machines and Joint Factor Analysis for
+ Speaker Verification},
+ booktitle = {IEEE International Conference on Acoustics, Speech and
+ Signal Processing (ICASSP)},
+ year = 2009,
+ address = {Taipei, Taiwan},
+ month = apr,
+ urllrde = {200904-ICASSP},
+ abstract = {This article presents several techniques for combining
+ Support Vector Machines (SVM) and the Joint Factor
+ Analysis (JFA) model for speaker verification. In this
+ combination, the SVMs are applied to different sources of
+ information produced by the JFA: the Gaussian Mixture
+ Model supervectors and the speaker and common factors. We
+ found that the use of JFA factors gave the best results,
+ especially when the within-class covariance normalization
+ method is applied in the speaker factor space in order to
+ compensate for the channel effect. The new combination
+ results are comparable to other classical JFA scoring
+ techniques.}
+}
+
+@InProceedings{ dehak.09.interspeech,
+ author = {Najim Dehak and R\'eda Dehak and Patrick Kenny and Niko
+ Brummer and Pierre Ouellet and Pierre Dumouchel},
+ title = {Support Vector Machines versus Fast Scoring in the
+ Low-Dimensional Total Variability Space for Speaker
+ Verification},
+ booktitle = {Interspeech},
+ year = 2009,
+ month = sep,
+ urllrde = {200909-INTERSPEECH-B},
+ abstract = {This paper presents a new speaker verification system
+ architecture based on Joint Factor Analysis (JFA) as a
+ feature extractor. In this modeling, the JFA is used to
+ define a new low-dimensional space named the total
+ variability factor space, instead of both the channel and
+ speaker variability spaces of the classical JFA. The main
+ contribution of this approach is the use of the cosine
+ kernel in the new total factor space to design two
+ different systems: the first is based on Support Vector
+ Machines, and the second directly uses this kernel value
+ as a decision score. This last scoring method makes the
+ process faster and less computationally complex than other
+ classical methods. We tested several intersession
+ compensation methods in the total factor space, and we
+ found that the combination of Linear Discriminant Analysis
+ and Within-Class Covariance Normalization achieved the
+ best performance.}
+}
+
+@InProceedings{ dehak.09.interspeechb,
+ author = {Pierre Dumouchel and Najim Dehak and Yazid Attabi and
+ R\'eda Dehak and Narj\`es Boufaden},
+ title = {Cepstral and Long-Term Features for Emotion Recognition},
+ booktitle = {Interspeech},
+ year = 2009,
+ month = sep,
+ note = {Open Performance Sub-Challenge Prize},
+ urllrde = {200909-INTERSPEECH-A},
+ abstract = {In this paper, we describe systems that were developed for
+ the Open Performance Sub-Challenge of the INTERSPEECH 2009
+ Emotion Challenge. We participated in both two-class and
+ five-class emotion detection. For the two-class problem,
+ the best performance is obtained by logistic regression
+ fusion of three systems. These systems use short- and
+ long-term speech features. This fusion achieved an absolute
+ improvement of 2.6\% on the unweighted recall value
+ compared with [6]. For the five-class problem, we submitted
+ two individual systems: a cepstral GMM vs. a long-term
+ GMM-UBM. The best result comes from the cepstral GMM and
+ produced an absolute improvement of 3.5\% compared to [6].}
+}
+
+@InProceedings{ dehak.10.nist,
+ author = {R. Dehak and N. Dehak},
+ title = {{LRDE} {S}peaker {R}ecognition {S}ystem for {NIST-SRE}
+ 2010},
+ booktitle = {NIST 2010 Speaker Recognition Evaluation},
+ year = 2010,
+ address = {Brno, CZ}
+}
+
+@InProceedings{ dehak.10.odyssey,
+ author = {N. Dehak and R. Dehak and J. Glass and D. Reynolds and P.
+ Kenny},
+ title = {Cosine {S}imilarity {S}coring without {S}core
+ {N}ormalization {T}echniques},
+ booktitle = {Odyssey: The Speaker and Language Recognition
+ Workshop},
+ year = 2010,
+ address = {Brno, Czech Republic}
+}
+
+@InProceedings{ dehak.10.odyssey2,
+ author = {S. Shum and N. Dehak and R. Dehak and J. Glass},
+ title = {Unsupervised {S}peaker {A}daptation based on the {C}osine
+ {S}imilarity for {T}ext-{I}ndependent {S}peaker
+ {V}erification},
+ booktitle = {Odyssey: The Speaker and Language Recognition
+ Workshop},
+ year = 2010,
+ address = {Brno, Czech Republic}
+}
+
+@InProceedings{ dehak.11.icassp,
+ author = {N. Dehak and Z. Karam and D. Reynolds and R. Dehak and W.
+ Campbell and J. Glass},
+ title = {A {C}hannel-{B}lind {S}ystem for {S}peaker
+ {V}erification},
+ booktitle = {International Conference on Acoustics, Speech and Signal
+ Processing (ICASSP)},
+ pages = {4536--4539},
+ year = 2011,
+ address = {Prague, Czech Republic},
+ month = may
+}
+
+@InProceedings{ dehak.11.interspeech,
+ author = {Najim Dehak and Pedro A. Torres-Carrasquillo and Douglas
+ Reynolds and Reda Dehak},
+ title = {Language {R}ecognition via {I}-{V}ectors and
+ {D}imensionality {R}eduction},
+ booktitle = {INTERSPEECH 2011},
+ pages = {857--860},
+ year = 2011,
+ address = {Florence, Italy},
+ month = aug
+}
+
+@Article{ dehak.11.taslp,
+ author = {N. Dehak and P. Kenny and R. Dehak and P. Dumouchel and P.
+ Ouellet},
+ title = {Front-{E}nd {F}actor {A}nalysis {F}or {S}peaker
+ {V}erification},
+ year = 2011,
+ journal = {IEEE Transactions on Audio, Speech, and Language
+ Processing},
+ volume = 19,
+ number = 4,
+ pages = {788--798},
+ month = may
+}
+
+@InProceedings{ demaille.05.iticse,
+ author = {Akim Demaille},
+ title = {Making Compiler Construction Projects Relevant to Core
+ Curriculums},
+ booktitle = {Proceedings of the Tenth Annual Conference on Innovation
+ and Technology in Computer Science Education
+ ({ITICSE'05})},
+ year = 2005,
+ address = {Universidade Nova de {L}isboa, {M}onte da {C}aparica,
+ {P}ortugal},
+ month = jun,
+ project = {Tiger},
+ urllrde = {200506-ITICSE},
+ isbn = {1-59593-024-8},
+ pages = {266--270},
+ abstract = {Having 300 students a year implement a compiler is a
+ debatable enterprise, since the industry will certainly
+ \emph{not} recruit them for this competence. Yet we made
+ that decision five years ago, for reasons not related to
+ compiler construction. We detail these motivations, the
+ resulting compiler design, and how we manage the
+ assignment. The project meets its goals, since the majority
+ of former students invariably refer to it as \emph{the}
+ project that taught them the most.}
+}
+
+@InProceedings{ demaille.06.isola,
+ author = {Akim Demaille and Sylvain Peyronnet and Beno\^it Sigoure},
+ title = {Modeling of Sensor Networks Using {XRM}},
+ booktitle = {Proceedings of the 2nd International Symposium on
+ Leveraging Applications of Formal Methods, Verification and
+ Validation ({ISoLA'06})},
+ year = 2006,
+ address = {Coral Beach Resort, {P}aphos, {C}yprus},
+ month = nov,
+ project = {Transformers},
+ abstract = {Sensor networks are composed of small electronic devices
+ that embed processors, sensors, batteries, memory and
+ communication capabilities. One of the main goals in the
+ design of such systems is handling the inherent complexity
+ of the nodes, compounded by the huge number of nodes in
+ the network. For these reasons, it becomes very difficult
+ to model and verify such systems. In this paper, we
+ investigate the main characteristics of sensor nodes,
+ discuss the use of a language derived from Reactive
+ Modules for their modeling, and propose a language (and a
+ tool set) that eases the modeling of this kind of system.},
+ urllrde = {200609-ISOLA}
+}
+
+@InProceedings{ demaille.06.rivf,
+ author = {Akim Demaille and Sylvain Peyronnet and {\relax Th}omas
+ H\'erault},
+ title = {Probabilistic Verification of Sensor Networks},
+ booktitle = {Proceedings of the Fourth International Conference on
+ Computer Sciences, Research, Innovation and Vision for the
+ Future (RIVF'06)},
+ year = 2006,
+ address = {Ho Chi Minh City, Vietnam},
+ isbn = {1-4244-0316-2},
+ month = feb,
+ project = {APMC},
+ urllrde = {200602-rivf},
+ abstract = {Sensor networks are networks consisting of miniature and
+ low-cost systems with limited computation power and energy.
+ Thanks to the low cost of the devices, one can spread a
+ huge number of sensors over a given area to monitor, for
+ example, physical changes of the environment. Typical
+ application areas include defense, the environment, and
+ the design of ad-hoc networks. In this paper, we address
+ the problem of verifying the correctness of such networks
+ through a case study. We model a simple sensor network
+ whose aim is to detect the appearance of an event in a
+ bounded area (such as a fire in a forest). The behaviour of
+ the network is probabilistic, so we use APMC, a tool that
+ approximately checks the correctness of extremely large
+ probabilistic systems, to verify it.}
+}
+
+@InProceedings{ demaille.08.fsmnlp,
+ author = {Akim Demaille and Alexandre Duret-Lutz and Florian Lesaint
+ and Sylvain Lombardy and Jacques Sakarovitch and Florent
+ Terrones},
+ title = {An {XML} format proposal for the description of weighted
+ automata, transducers, and regular expressions},
+ booktitle = {Proceedings of the seventh international workshop on
+ Finite-State Methods and Natural Language Processing
+ (FSMNLP'08)},
+ year = 2008,
+ address = {Ispra, Italy},
+ month = sep,
+ abstract = {We present an XML format that allows the description of a
+ large class of finite weighted automata and transducers. Our
+ design choices stem from our policy of making the
+ implementation as simple as possible. This format has been
+ tested for the communication between the modules of our
+ automata manipulation platform Vaucanson, but this document
+ is less an experiment report than a position paper intended
+ to open the discussion among the community of automata
+ software writers.},
+ urllrde = {200809-FSMNLP}
+}
+
+@InProceedings{ demaille.08.iticse,
+ author = {Akim Demaille and Roland Levillain and Beno\^it Perrot},
+ title = {A Set of Tools to Teach Compiler Construction},
+ booktitle = {Proceedings of the Thirteenth Annual Conference on
+ Innovation and Technology in Computer Science Education
+ ({ITICSE'08})},
+ pages = {68--72},
+ year = 2008,
+ address = {Universidad Polit\'ecnica de Madrid, Spain},
+ month = jun,
+ project = {Tiger},
+ urllrde = {200806-ITICSE},
+ abstract = {Compiler construction is a widely used software
+ engineering exercise, but because most students will not be
+ compiler writers, care must be taken to make it relevant in
+ a core curriculum. Auxiliary tools, such as generators and
+ interpreters, often hinder learning: students have to
+ fight tool idiosyncrasies, mysterious errors, and other
+ issues of little educational value. We introduce a set of
+ tools especially designed or improved for educational
+ compiler construction projects in \Cxx. We also provide
+ suggestions about new approaches to compiler construction.
+ We draw guidelines from our experience to make tools
+ suitable for educational purposes.}
+}
+
+@Misc{ demaille.08.kex,
+ author = {Akim Demaille and Roland Levillain},
+ title = {Compiler Construction as an Effective Application to Teach
+ Object-Oriented Programming},
+ howpublished = {The seventh ``Killer Examples'' workshop, Worked Examples
+ for Sound OO Pedagogy, at OOPSLA'08.},
+ address = {Nashville, USA},
+ month = oct,
+ year = 2008,
+ note = {Oral presentation},
+ project = {Tiger},
+ urllrde = {200810-KEX},
+ abstract = {Compiler construction is a course feared by most students,
+ and a competence seldom needed in industry. Yet we claim
+ that compiler construction is a wonderful topic that
+ benefits from virtually all computer-science topics. In
+ this paper we show in particular why compiler construction
+ is a killer example for Object-Oriented Programming,
+ providing a unique opportunity for students to understand
+ what it is, what it can be used for, and how it works.}
+}
+
+@InProceedings{ demaille.08.ldta,
+ oldkeys = {durlin.08.seminar},
+ author = {Akim Demaille and Renaud Durlin and Nicolas Pierron and
+ Beno\^it Sigoure},
+ title = {{Semantics driven disambiguation: A comparison of
+ different approaches}},
+ booktitle = {Proceedings of the 8th workshop on Language Descriptions,
+ Tools and Applications (LDTA'08)},
+ year = 2008,
+ urllrde = {200802-LDTA},
+ abstract = {Context-sensitive languages such as \C or \Cxx can be
+ parsed using a context-free but ambiguous grammar, which
+ requires another stage, disambiguation, in order to select
+ the single parse tree that complies with the language's
+ semantic rules. Naturally, large and complex languages
+ induce large and complex disambiguation stages. If, in
+ addition, the parser should be extensible, for instance to
+ enable the embedding of domain specific languages, the
+ disambiguation techniques should feature traditional
+ software-engineering qualities: modularity, extensibility,
+ scalability and expressiveness. \\ We evaluate three
+ approaches to write disambiguation filters for \acs{sdf}
+ grammars: algebraic equations with \acs{asf}, rewrite-rules
+ with programmable traversals for \stratego, and attribute
+ grammars with \acr{tag}, our system. To this end we
+ introduce \phenix, a highly ambiguous language. Its
+ ``standard'' grammar exhibits ambiguities inspired by those
+ found in the \C and \Cxx standard grammars. To evaluate
+ modularity, the grammar is layered: it starts with a small
+ core language, and several layers add new features, new
+ production rules, and new ambiguities.},
+ project = {Transformers},
+ keywords = {Transformers, context-free grammar, attribute grammar,
+ Stratego, ASF, SDF, disambiguation, parsing, program
+ transformation, term rewriting}
+}
+
+@InProceedings{ demaille.09.sac,
+ author = {Akim Demaille and Roland Levillain and Beno\^it Sigoure},
+ title = {{TWEAST}: A Simple and Effective Technique to Implement
+ Concrete-Syntax {AST} Rewriting Using Partial Parsing},
+ booktitle = {Proceedings of the 24th Annual ACM Symposium on Applied
+ Computing (SAC'09)},
+ pages = {1924--1929},
+ year = 2009,
+ address = {Waikiki Beach, Honolulu, Hawaii, USA},
+ month = mar,
+ project = {Tiger},
+ urllrde = {200903-SAC},
+ abstract = {ASTs are commonly used to represent an input/output
+ program in compilers and language processing tools. Many of
+ the tasks of these tools consist in generating and
+ rewriting ASTs. Such an approach can become tedious and
+ hard to maintain for complex operations, such as program
+ transformation, optimization, instrumentation, etc. On the
+ other hand, \emph{concrete syntax} provides a natural and
+ simpler representation of programs, but it is not usually
+ available as a direct feature of the aforementioned tools.
+ We propose a simple technique to implement AST generation
+ and rewriting in general purpose languages using concrete
+ syntax. Our approach relies on extensions made in the
+ scanner and the parser and the use of objects supporting
+ partial parsing called Text With Embedded Abstract Syntax
+ Trees (TWEASTs). A compiler for a simple language (Tiger)
+ written in \Cxx serves as an example, featuring
+ transformations in concrete syntax: syntactic desugaring,
+ optimization, code instrumentation such as bounds-checking,
+ etc. Extensions of this technique to provide a full-fledged
+ concrete-syntax rewriting framework are presented as well.}
+}
+
+@InProceedings{ denise.06.rt,
+ author = {Alain Denise and Marie-Claude Gaudel and
+ Sandrine-Dominique Gouraud and Richard Lassaigne and
+ Sylvain Peyronnet},
+ title = {Uniform Random Sampling of Traces in Very Large Models},
+ booktitle = {Proceedings of the 1st international workshop on Random
+ Testing 2006 (RT06)},
+ year = 2006,
+ series = {ACM digital library},
+ pages = {10--19},
+ project = {APMC},
+ urllrde = {200606-RT},
+ abstract = {This paper presents some first results on how to perform
+ uniform random walks (where every trace has the same
+ probability of occurring) in very large models. The models
+ considered here are described in a succinct way as a set of
+ communicating reactive modules. The method relies upon
+ techniques for counting and drawing uniformly at random
+ words in regular languages. Each module is considered as an
+ automaton defining such a language. It is shown how it is
+ possible to combine local uniform drawings of traces, and
+ to obtain a global uniform random sampling, without
+ construction of the global model.}
+}
+
+@Article{ dolstra.10.jfp,
+ author = {Eelco Dolstra and Andres L\"oh and Nicolas Pierron},
+ title = {{NixOS}: A purely functional {Linux} distribution},
+ journal = {Journal of Functional Programming},
+ year = 2010,
+ note = {Published online by Cambridge University Press 15 Oct
+ 2010},
+ urllrde = {201010-JFP},
+ doi = {10.1017/S0956796810000195}
+}
+
+@InProceedings{ duflot.04.avocs,
+ author = {Marie Duflot and Laurent Fribourg and {\relax Th}omas
+ Herault and Richard Lassaigne and Fr\'ed\'eric Magniette
+ and Stephane Messika and Sylvain Peyronnet and Claudine
+ Picaronny},
+ title = {Probabilistic model checking of the {CSMA/CD}, protocol
+ using {PRISM} and {APMC}},
+ booktitle = {Proceedings of the 4th International Workshop on Automated
+ Verification of Critical Systems (AVoCS)},
+ year = 2004,
+ series = {Electronic Notes in Theoretical Computer Science Series},
+ volume = 128,
+ number = 6,
+ pages = {195--214},
+ project = {APMC},
+ urllrde = {200409-AVOCS},
+ abstract = {Carrier Sense Multiple Access/Collision Detection
+ (CSMA/CD) is the protocol for carrier transmission access
+ in Ethernet networks (international standard IEEE 802.3).
+ On Ethernet, any Network Interface Card (NIC) can try to
+ send a packet in a channel at any time. If another NIC
+ tries to send a packet at the same time, a collision is
+ said to occur and the packets are discarded. The CSMA/CD
+ protocol was designed to avoid this problem, more precisely
+ to allow a NIC to send its packet without collision. This
+ is done by way of a randomized exponential backoff process.
+ In this paper, we analyse the correctness of the CSMA/CD
+ protocol, using techniques from probabilistic model
+ checking and approximate probabilistic model checking. The
+ tools that we use are PRISM and APMC. Moreover, we provide
+ a quantitative analysis of some CSMA/CD properties.}
+}
+
+@InCollection{ duflot.06.book,
+ author = {Marie Duflot and Marta Kwiatkowska and Gethin Norman and
+ Dave Parker and Sylvain Peyronnet and Claudine Picaronny
+ and Jeremy Sproston},
+ title = {Practical Application of Probabilistic Model Checking to
+ Communication Protocols},
+ booktitle = {FMICS Handbook on Industrial Critical Systems},
+ year = 2006,
+ editor = {Stefania Gnesi and Tiziana Margaria},
+ chapter = 7,
+ note = {To appear},
+ project = {APMC},
+ urllrde = {200600-BOOK}
+}
+
+@InProceedings{ duret.00.gcse,
+ author = {Alexandre Duret-Lutz},
+ title = {Olena: a component-based platform for image processing,
+ mixing generic, generative and {OO} programming},
+ booktitle = {Proceedings of the 2nd International Symposium on
+ Generative and Component-Based Software Engineering
+ (GCSE)---Young Researchers Workshop; published in
+ ``Net.ObjectDays2000''},
+ pages = {653--659},
+ year = 2000,
+ address = {Erfurt, Germany},
+ month = oct,
+ isbn = {3-89683-932-2},
+ project = {Olena},
+ urllrde = {200010-NetObjectDays},
+ abstract = {This paper presents Olena, a toolkit for programming and
+ designing image processing chains in which each processing
+ step is a component. But since there exist many image types
+ (different structures such as 2D images, 3D images or
+ graphs, as well as different value types) the platform has
+ been designed with genericity and reusability in mind: each
+ component is written as a generic C++ procedure, \`a la
+ STL. Other libraries, such as Khoros [Kon94] have a
+ different approach where a processing component contains an
+ implementation for each type supported by the library. This
+ makes code maintenance hard and prevents easy addition of
+ new image types. Still, Olena is not only a generic
+ component library [Jaz95], it shall contain additional
+ tools such as a visual programming environment (VPE). Those
+ tools may be programmed in a classical object-oriented
+ fashion (using operation and inclusion polymorphism) which
+ may seems antagonist with the generic programming paradigm
+ used in the library. Section 2 outlines the architecture of
+ Olena and elaborates more on the design problems resulting
+ from the use of generic components. Section 3 presents the
+ solution chosen to address these problems.}
+}
+
+@InProceedings{ duret.01.ae,
+ author = {Alexandre Duret-Lutz},
+ title = {Expression templates in {A}da~95},
+ booktitle = {Proceedings of the 6th International Conference on
+ Reliable Software Technologies (Ada-Europe)},
+ year = 2001,
+ series = {Lecture Notes in Computer Science Series},
+ volume = 2043,
+ address = {Leuven, Belgium},
+ month = may,
+ publisher = {Springer-Verlag},
+ pages = {191--202},
+ note = {Best Paper Award.},
+ project = {Software},
+ urllrde = {200105-Ae},
+ abstract = {High-order matrix or vector expressions tend to be
+ penalized by the use of huge temporary variables.
+ Expression templates is a C++ technique which can be used
+ to avoid these temporaries, in a way that is transparent to
+ the user. We present an Ada adaptation of this technique
+ which - while not transparent - addresses the same
+ efficiency issue as the original. We make intensive use of
+ the signature idiom to combine packages together, and
+ discuss its importance in generic programming. Finally, we
+ express some concerns about generic programming in Ada.}
+}
+
+@InProceedings{ duret.01.coots,
+ author = {Alexandre Duret-Lutz and {\relax Th}ierry G\'eraud and
+ Akim Demaille},
+ title = {Generic design patterns in {C++}},
+ booktitle = {Proceedings of the 6th USENIX Conference on
+ Object-Oriented Technologies and Systems (COOTS)},
+ year = 2001,
+ address = {San Antonio, TX, USA},
+ pages = {189--202},
+ month = {January-February},
+ publisher = {USENIX Association},
+ project = {Software},
+ urllrde = {200102-Coots},
+ abstract = {Generic programming is a paradigm whose wide adoption by
+ the C++ community is quite recent. In this approach most
+ classes and procedures are parameterized, leading to the
+ construction of general and efficient software components.
+ In this paper, we show how some design patterns from Gamma
+ et al. can be adapted to this paradigm. Although these
+ patterns rely heavily on dynamic binding, we show that, by
+ making intensive use of parametric polymorphism in the
+ context of generic programming, the method calls in these
+ patterns can be resolved at compile-time. The speed-up
+ achieved using these patterns is significant.}
+}
+
+@InProceedings{ duret.09.atva,
+ author = {Alexandre Duret-Lutz and Denis Poitrenaud and Jean-Michel
+ Couvreur},
+ title = {On-the-fly Emptiness Check of Transition-based {S}treett
+ Automata},
+ booktitle = {Proceedings of the 7th International Symposium on
+ Automated Technology for Verification and Analysis
+ (ATVA'09)},
+ year = 2009,
+ editor = {Zhiming Liu and Anders P. Ravn},
+ series = {Lecture Notes in Computer Science},
+ publisher = {Springer-Verlag},
+ pages = {213--227},
+ volume = 5799,
+ abstract = {In the automata theoretic approach to model checking,
+ checking a state-space $S$ against a linear-time property
+ $\varphi$ can be done in $\RO(|S|\times
+ 2^{\RO(|\varphi|)})$ time. When model checking under $n$
+ strong fairness hypotheses expressed as a Generalized
+ B\"uchi automaton, this complexity becomes $\RO(|S|\times
+ 2^{\RO(|\varphi|+n)})$.\par Here we describe an algorithm
+ to check the emptiness of Streett automata, which allows
+ model checking under $n$ strong fairness hypotheses in
+ $\RO(|S|\times 2^{\RO(|\varphi|)}\times n)$. We focus on
+ transition-based Streett automata, because they allow us to
+ express strong fairness hypotheses by injecting Streett
+ acceptance conditions into the state-space without any blowup.},
+ urllrde = {200910-ATVA}
+}
+
+@TechReport{ duret.11.arxiv,
+ author = {Alexandre Duret-Lutz and Kais Klai and Denis Poitrenaud
+ and Yann Thierry-Mieg},
+ title = {Combining Explicit and Symbolic Approaches for Better
+ On-the-Fly {LTL} Model Checking},
+ institution = {arXiv},
+ year = 2011,
+ number = {1106.5700},
+ month = jun,
+ note = {Extended version of our ATVA'11 paper, presenting two new
+ techniques instead of one.},
+ url = {http://arxiv.org/abs/1106.5700},
+ urllrde = {201106-ARXIV},
+ abstract = {We present two new hybrid techniques that replace the
+ synchronized product used in the automata-theoretic
+ approach for LTL model checking. The proposed products are
+ explicit graphs of aggregates (symbolic sets of states)
+ that can be interpreted as B\"uchi automata. These hybrid
+ approaches allow on the one hand to use classical
+ emptiness-check algorithms and build the graph on-the-fly,
+ and on the other hand, to have a compact encoding of the
+ state space thanks to the symbolic representation of the
+ aggregates. The \emph{Symbolic Observation Product} assumes
+ a globally stuttering property (e.g., LTL$\setminus \X$) to
+ aggregate states. The \emph{Self-Loop Aggregation Product}
+ does not require the property to be globally stuttering
+ (i.e., it can tackle full LTL), but dynamically detects and
+ exploits a form of stuttering where possible. Our
+ experiments show that these two variants, while
+ incomparable with each other, can outperform other existing
+ approaches.}
+}
+
+@InProceedings{ duret.11.atva,
+ author = {Alexandre Duret-Lutz and Kais Klai and Denis Poitrenaud
+ and Yann Thierry-Mieg},
+ title = {Self-Loop Aggregation Product --- A New Hybrid Approach to
+ On-the-Fly {LTL} Model Checking},
+ booktitle = {Proceedings of the 9th International Symposium on
+ Automated Technology for Verification and Analysis
+ (ATVA'11)},
+ year = 2011,
+ series = {Lecture Notes in Computer Science},
+ volume = {6996},
+ pages = {336--350},
+ address = {Taipei, Taiwan},
+ month = oct,
+ publisher = {Springer},
+ urllrde = {201110-ATVA},
+ abstract = {We present the \emph{Self-Loop Aggregation Product}
+ (SLAP), a new hybrid technique that replaces the
+ synchronized product used in the automata-theoretic
+ approach for LTL model checking. The proposed product is an
+ explicit graph of aggregates (symbolic sets of states) that
+ can be interpreted as a B\"uchi automaton. The criterion
+ used by SLAP to aggregate states from the Kripke structure
+ is based on the analysis of self-loops that occur in the
+ B\"uchi automaton expressing the property to verify. Our
+ hybrid approach allows on the one hand to use classical
+ emptiness-check algorithms and build the graph on-the-fly,
+ and on the other hand, to have a compact encoding of the
+ state space thanks to the symbolic representation of the
+ aggregates. Our experiments show that this technique often
+ outperforms other existing (hybrid or fully symbolic)
+ approaches.}
+}
+
+@Misc{ duret.11.sumo,
+ author = {Alexandre Duret-Lutz},
+ title = {Building {LTL} Model Checkers using {T}ransition-based
+ {G}eneralized {B\"u}chi {A}utomata},
+ howpublished = {Invited talk at SUMo'11},
+ month = jun,
+ year = 2011
+}
+
+@InProceedings{ duret.11.vecos,
+ author = {Alexandre Duret-Lutz},
+ title = {{LTL} Translation Improvements in {Spot}},
+ booktitle = {Proceedings of the 5th International Workshop on
+ Verification and Evaluation of Computer and Communication
+ Systems (VECoS'11)},
+ year = {2011},
+ series = {Electronic Workshops in Computing},
+ address = {Tunis, Tunisia},
+ month = sep,
+ publisher = {British Computer Society},
+ abstract = {Spot is a library of model-checking algorithms. This paper
+ focuses on the module translating LTL formul{\ae} into
+ automata. We discuss improvements that have been
+ implemented in the last four years, we show how Spot's
+ translation competes on various benchmarks, and we give
+ some insight into its implementation.},
+ url = {http://ewic.bcs.org/category/15853},
+ urllrde = {201109-VECOS}
+}
+
+@InProceedings{ fabre.00.egve,
+ author = {Yoann Fabre and Guillaume Pitel and Laurent Soubrevilla
+ and Emmanuel Marchand and {\relax Th}ierry G\'eraud and
+ Akim Demaille},
+ title = {An asynchronous architecture to manage communication,
+ display, and user interaction in distributed virtual
+ environments},
+ booktitle = {Virtual Environments 2000, Proceedings of the 6th
+ Eurographics Workshop on Virtual Environments (EGVE)},
+ year = 2000,
+ address = {Amsterdam, The Netherlands},
+ month = jun,
+ pages = {105--113},
+ series = {Computer Science / Eurographics Series},
+ editor = {J.D. Mulder and R. van Liere},
+ publisher = {Springer-Verlag WienNewYork},
+ project = {Urbi},
+ urllrde = {200006-Egve},
+ abstract = {In Distributed Virtual Environments, each machine runs the
+ same software, which is in charge of handling the
+ communications over the network, providing the user with a
+ view of the world, and processing their requests. A major
+ issue in the design of such software is to ensure that
+ network communication does not degrade the interactivity
+ between the machine and the user. In this paper, we present
+ software designed to achieve this goal, based on tools
+ rarely used in this area.}
+}
+
+@InProceedings{ fabre.00.vsmm,
+ author = {Yoann Fabre and Guillaume Pitel and Didier Verna},
+ title = {Urbi et {O}rbi: unusual design and implementation choices
+ for distributed virtual environments},
+ booktitle = {Proceedings of the 6th International Conference on Virtual
+ Systems and MultiMedia (VSMM)---Intelligent Environments
+ Workshop},
+ pages = {714--724},
+ year = 2000,
+ address = {Gifu, Japan},
+ month = oct,
+ publisher = {IOS Press, USA},
+ isbn = {1-58603-108-2},
+ project = {Urbi},
+ urllrde = {200010-Vsmm},
+ abstract = {This paper describes Urbi et Orbi, a distributed virtual
+ environment (DVE) project that is being conducted in the
+ Research and Development Laboratory at EPITA. Our ultimate
+ goal is to provide support for large scale multi-user
+ virtual worlds on end-user machines. The incremental
+ development of this project led us to take unusual design
+ and implementation decisions that we propose to relate in
+ this paper. Firstly, a general overview of the project is
+ given, along with the initial requirements we wanted to
+ meet. Then, we go on with a description of the system's
+ architecture. Lastly, we describe and justify the unusual
+ choices we have made in the project's internals.}
+}
+
+@InProceedings{ fabre.00.vw,
+ author = {Yoann Fabre and Guillaume Pitel and Laurent Soubrevilla
+ and Emmanuel Marchand and {\relax Th}ierry G\'eraud and
+ Akim Demaille},
+ title = {A framework to dynamically manage distributed virtual
+ environments},
+ booktitle = {Proceedings of the 2nd International Conference on Virtual
+ Worlds (VW)},
+ year = 2000,
+ address = {Paris, France},
+ month = jul,
+ pages = {54--64},
+ editor = {J.-C. Heudin},
+ publisher = {Springer Verlag},
+ series = {Lecture Notes in Computer Science Series},
+ volume = {LNAI 1834},
+ project = {Urbi},
+ urllrde = {200007-VW},
+ abstract = {In this paper, we present the project urbi, a framework to
+ dynamically manage distributed virtual environments (DVEs).
+ This framework relies on a dedicated scripting language,
+ goal, which is typed, object-oriented and dynamically
+ bound. goal is interpreted by the application hosted by
+ each machine and is designed to handle efficiently both
+ network communications and interactivity. Finally, we have
+ made an unusual design decision: our project is based on a
+ functional programming language, ocaml.}
+}
+
+@InProceedings{ fouquier.07.gbr,
+ author = {Geoffroy Fouquier and Jamal Atif and Isabelle Bloch},
+ title = {Local reasoning in fuzzy attribute graphs for optimizing
+ sequential segmentation},
+ booktitle = {Proceedings of the 6th IAPR TC-15 Workshop on Graph-based
+ Representations in Pattern Recognition (GBR)},
+ year = 2007,
+ month = jun,
+ address = {Alicante, Spain},
+ volume = {LNCS 4538},
+ editor = {F. Escolano and M. Vento},
+ publisher = {Springer Verlag},
+ isbn = {978-3-540-72902-0},
+ pages = {138--147},
+ urllrde = {200706-GBR},
+ abstract = {Spatial relations play a crucial role in model-based image
+ recognition and interpretation due to their stability
+ compared to many other image appearance characteristics.
+ Graphs are well adapted to represent such information.
+ Sequential methods for knowledge-based recognition of
+ structures require to define in which order the structures
+ have to be recognized. We propose to address this problem
+ of order definition by developing algorithms that
+ automatically deduce sequential segmentation paths from
+ fuzzy spatial attribute graphs. As an illustration, these
+ algorithms are applied to brain image understanding.}
+}
+
+@InProceedings{ fouquier.07.icassp,
+ author = {Geoffroy Fouquier and Laurence Likforman and J\'er\^ome
+ Darbon and Bulent Sankur},
+ title = {The Biosecure Geometry-based System for Hand Modality},
+ booktitle = {Proceedings of the 32nd IEEE International Conference on
+ Acoustics, Speech, and Signal Processing (ICASSP)},
+ year = 2007,
+ month = apr,
+ address = {Honolulu, Hawaii, USA},
+ volume = {I},
+ pages = {801--804},
+ isbn = {1-4244-0728-1},
+ urllrde = {200704-ICASSP},
+ abstract = {We present an identification and authentication system
+ based on the hand modality, which is part of a reference
+ system for all modalities developed within the Biosecure
+ consortium. It relies on simple geometric features
+ extracted from the hand boundary. The different steps of this
+ system are detailed, namely: pre-processing, feature
+ extraction and hand matching. This system has been tested
+ on the Biosecure hand database which consists of 4500 hand
+ images of 750 individuals. Results are detailed with
+ respect to different enrolment conditions such as
+ population size, enrolment size, and image resolution.}
+}
+
+@InProceedings{ geraud.00.europlop,
+ author = {{\relax Th}ierry G\'eraud and Alexandre Duret-Lutz},
+ title = {Generic programming redesign of patterns},
+ booktitle = {Proceedings of the 5th European Conference on Pattern
+ Languages of Programs (EuroPLoP)},
+ year = 2000,
+ month = jul,
+ address = {Irsee, Germany},
+ pages = {283--294},
+ editor = {M. Devos and A. R\"uping},
+ publisher = {UVK, Univ. Verlag, Konstanz},
+ project = {Software},
+ urllrde = {200007-EuroPlop},
+ abstract = {This pattern addresses the problem of improving the
+ performance of design patterns when they are involved in
+ intensive algorithms. Generic programming is a paradigm in
+ which most classes and procedures are parameterized, thus
+ leading to the construction of general and efficient
+ software components. We demonstrate that some design
+ patterns from Gamma et al. can be translated into this
+ paradigm while handling operation polymorphism by
+ parametric polymorphism. We thus preserve their modularity
+ and reusability properties but we avoid the performance
+ penalty due to their dynamic behavior, which is a critical
+ issue in numerical computing.}
+}
+
+@InProceedings{ geraud.00.icpr,
+ author = {{\relax Th}ierry G\'eraud and Yoann Fabre and Alexandre
+ Duret-Lutz and Dimitri Papadopoulos-Orfanos and
+ Jean-Fran\c{c}ois Mangin},
+ title = {Obtaining genericity for image processing and pattern
+ recognition algorithms},
+ booktitle = {Proceedings of the 15th International Conference on
+ Pattern Recognition (ICPR)},
+ year = 2000,
+ month = sep,
+ address = {Barcelona, Spain},
+ volume = 4,
+ pages = {816--819},
+ publisher = {IEEE Computer Society},
+ project = {Olena},
+ urllrde = {200009-Icpr},
+ abstract = {Algorithm libraries dedicated to image processing and
+ pattern recognition are not reusable; to run an algorithm
+ on particular data, one usually has either to rewrite the
+ algorithm or to manually ``copy, paste, and modify''. This
+ is due to the lack of genericity of the programming
+ paradigm used to implement the libraries. In this paper, we
+ present a recent paradigm that allows algorithms to be
+ written once and for all and to accept input of various
+ types. Moreover, this total reusability can be obtained
+ with a very comprehensible writing style and without
+ significant execution cost compared to a dedicated
+ algorithm. This
+ new paradigm is called ``generic programming'' and is fully
+ supported by the C++ language. We show how this paradigm
+ can be applied to image processing and pattern recognition
+ routines. The perspective of our work is the creation of a
+ generic library.}
+}
+
+@InProceedings{ geraud.00.rfia,
+ author = {{\relax Th}ierry G\'eraud and Isabelle Bloch and Henri
+ Ma{\^\i}tre},
+ title = {Reconnaissance de structures c\'er\'ebrales \`a l'aide
+ d'un atlas et par fusion d'informations structurelles floues},
+ booktitle = {Actes du 12\`eme Congr\`es Francophone AFRIF-AFIA de
+ Reconnaissance des Formes et Intelligence Artificielle
+ (RFIA)},
+ year = 2000,
+ address = {Paris, France},
+ month = feb,
+ volume = 1,
+ pages = {287--295},
+ note = {EPITA as current address.},
+ category = {national},
+ project = {Image},
+ urllrde = {200002-RFIA},
+ abstract = {We propose an automatic procedure for the progressive
+ recognition of the internal structures of the brain,
+ guided by an anatomical atlas. The originality of our
+ procedure is manifold. First, it takes structural
+ information into account in the form of flexible spatial
+ constraints, using the formalisms of fuzzy set theory and
+ information fusion. Second, the computation of the
+ correspondence between the MRI volume and the atlas that
+ we propose allows a discrete deformation field to be
+ inferred, respecting constraints on the surface of the
+ objects. Finally, the sequential nature of the procedure
+ makes it possible to rely on the knowledge of already
+ segmented objects to reach objects that are a priori
+ increasingly difficult to obtain.}
+}
+
+@InProceedings{ geraud.01.ai,
+ author = {{\relax Th}ierry G\'eraud and Yoann Fabre and Alexandre
+ Duret-Lutz},
+ title = {Applying generic programming to image processing},
+ booktitle = {Proceedings of the IASTED International Conference on
+ Applied Informatics (AI)---Symposium on Advances in
+ Computer Applications},
+ year = 2001,
+ publisher = {ACTA Press},
+ editor = {M.H.~Hamsa},
+ address = {Innsbruck, Austria},
+ pages = {577--581},
+ month = feb,
+ project = {Olena},
+ urllrde = {200102-Ai},
+ abstract = {This paper presents the evolution of algorithm
+ implementations in image processing libraries and discusses
+ the limits of these implementations in terms of
+ reusability. In particular, we show that in C++, an
+ algorithm can have a general implementation; said
+ differently, an implementation can be generic, i.e.,
+ independent of both the input aggregate type and the type
+ of the data contained in the input aggregate. A total
+ reusability of algorithms can therefore be obtained;
+ moreover, a generic implementation is more natural and does
+ not introduce a meaningful additional cost in execution
+ time as compared to an implementation dedicated to a
+ particular input type.}
+}
+
+@InProceedings{ geraud.01.icip,
+ author = {{\relax Th}ierry G\'eraud and Pierre-Yves Strub and
+ J\'er\^ome Darbon},
+ title = {Color image segmentation based on automatic morphological
+ clustering},
+ booktitle = {Proceedings of the IEEE International Conference on Image
+ Processing (ICIP)},
+ year = 2001,
+ volume = 3,
+ pages = {70--73},
+ address = {Thessaloniki, Greece},
+ month = oct,
+ project = {Image},
+ urllrde = {200110-Icip},
+ abstract = {We present an original method to segment color images
+ using a classification in the 3-D color space. In the case
+ of ordinary images, clusters that appear in 3-D histograms
+ usually do not fit a well-known statistical model. For that
+ reason, we propose a classifier that relies on mathematical
+ morphology, and more precisely on the watershed algorithm.
+ We show on various images that the expected color clusters
+ are correctly identified by our method. Last, to segment
+ color images into coherent regions, we perform a Markovian
+ labeling that takes advantage of the morphological
+ classification results.}
+}
+
+@InProceedings{ geraud.01.icisp,
+ author = {{\relax Th}ierry G\'eraud and Pierre-Yves Strub and
+ J\'er\^ome Darbon},
+ title = {Segmentation d'images en couleur par classification
+ morphologique non supervis\'ee},
+ booktitle = {Proceedings of the International Conference on Image and
+ Signal Processing (ICISP)},
+ year = 2001,
+ pages = {387--394},
+ address = {Agadir, Morocco},
+ month = may,
+ publisher = {Faculty of Sciences at Ibn Zohr University, Morocco},
+ note = {In French},
+ project = {Image},
+ urllrde = {200105-Icisp},
+ abstract = {In this paper, we present an original method to segment
+ color images using a classification of the image histogram
+ in the 3D color space. As color modes in natural images
+ usually do not fit a well-known statistical model, we
+ propose a classifier that relies on mathematical morphology
+ and, more particularly, on the watershed algorithm. We show
+ on various images that the expected color modes are
+ correctly identified and, in order to obtain coherent
+ regions, we extend the method to make the segmentation
+ contextual.}
+}
+
+@InProceedings{ geraud.03.grec,
+ author = {{\relax Th}ierry G\'eraud and Geoffroy Fouquier and Quoc
+ Peyrot and Nicolas Lucas and Franck Signorile},
+ title = {Document type recognition using evidence theory},
+ booktitle = {Proceedings of the 5th IAPR International Workshop on
+ Graphics Recognition (GREC)},
+ year = 2003,
+ pages = {212--221},
+ editor = {Josep Llad\`os},
+ address = {Computer Vision Center, UAB, Barcelona, Spain},
+ month = jul,
+ project = {Image},
+ urllrde = {200307-Grec},
+ abstract = {This paper presents a method to recognize the type of a
+ document when a database of models (document types) is
+ given. For instance, when all documents are forms and we
+ know all the different types of forms, we want to be able
+ to assign to an input document its type of form. To that
+ aim, we define each model by a set of characteristics
+ whose nature can vary from one to another. For instance, a
+ characteristic can be having a flower-shaped logo in the
+ top-left corner as well as having fonts of about 12pt.
+ This paper does not intend to explain how to extract such
+ knowledge from documents, but it describes how to use such
+ information to decide what the type of a given document is
+ when different document types are described by
+ characteristics.}
+}
+
+@InProceedings{ geraud.03.ibpria,
+ author = {{\relax Th}ierry G\'eraud},
+ title = {Segmentation of curvilinear objects using a
+ watershed-based curve adjacency graph},
+ booktitle = {Proceedings of the 1st Iberian Conference on Pattern
+ Recognition and Image Analysis (IbPRIA)},
+ pages = {279--286},
+ year = 2003,
+ volume = 2652,
+ series = {Lecture Notes in Computer Science Series},
+ address = {Mallorca, Spain},
+ month = jun,
+ publisher = {Springer-Verlag},
+ project = {Image},
+ urllrde = {200306-Ibpria},
+ abstract = {This paper presents a general framework to segment
+ curvilinear objects in 2D images. A pre-processing step
+ relies on mathematical morphology to obtain a connected
+ line which encloses curvilinear objects. Then, a graph is
+ constructed from this line and a Markovian Random Field is
+ defined to perform objects segmentation. Applications of
+ our framework are numerous: they range from simple curve
+ segmentation to complex road network extraction in
+ satellite images.}
+}
+
+@InProceedings{ geraud.03.icisp,
+ author = {{\relax Th}ierry G\'eraud},
+ title = {Segmentation d'objets curvilignes \`a l'aide des champs de
+ Markov sur un graphe d'adjacence de courbes issu de
+ l'algorithme de la ligne de partage des eaux},
+ booktitle = {Proceedings of the International Conference on Image and
+ Signal Processing (ICISP)},
+ year = 2003,
+ volume = 2,
+ pages = {404--411},
+ address = {Agadir, Morocco},
+ month = jun,
+ publisher = {Faculty of Sciences at Ibn Zohr University, Morocco},
+ note = {In French},
+ project = {Image},
+ urllrde = {200306-Icisp},
+ abstract = {This paper presents a general framework to segment
+ curvilinear objects in 2D images. A pre-processing step
+ relies on mathematical morphology to obtain a connected
+ line which encloses curvilinear objects. Then, a graph is
+ constructed from this line and a Markovian Random Field is
+ defined to perform objects segmentation. Applications of
+ our framework are numerous: they range from simple curve
+ segmentation to complex road network extraction in
+ satellite images.}
+}
+
+@InProceedings{ geraud.03.nsip,
+ author = {{\relax Th}ierry G\'eraud},
+ title = {Fast Road Network Extraction in Satellite Images using
+ Mathematical Morphology and {MRF}},
+ booktitle = {Proceedings of the EURASIP Workshop on Nonlinear Signal
+ and Image Processing (NSIP)},
+ year = 2003,
+ address = {Trieste, Italy},
+ month = jun,
+ project = {Image},
+ urllrde = {200306-Nsip},
+ abstract = {This paper presents a fast method to extract road networks
+ in satellite images. A pre-processing stage relies on
+ mathematical morphology to obtain a connected line which
+ encloses the road network. Then, a graph is constructed from
+ this line and a Markovian Random Field is defined to
+ perform road extraction.}
+}
+
+@InProceedings{ geraud.04.iccvg,
+ author = {{\relax Th}ierry G\'eraud and Giovanni Palma and Niels
+ {Van Vliet}},
+ title = {Fast color image segmentation based on levellings in
+ feature Space},
+ booktitle = {Computer Vision and Graphics---International Conference on
+ Computer Vision and Graphics (ICCVG), Warsaw, Poland,
+ September 2004},
+ year = 2004,
+ series = {Computational Imaging and Vision},
+ volume = 32,
+ publisher = {Kluwer Academic Publishers},
+ pages = {800--807},
+ note = {On CD.},
+ project = {Image},
+ urllrde = {200408-ICCVG},
+ abstract = {This paper presents a morphological classifier with
+ application to color image segmentation. The basic idea of
+ a morphological classifier is to consider that a color
+ histogram is a 3D gray-level image and that morphological
+ operators can be applied to modify this image. The final
+ objective is to extract clusters in color space, that is,
+ identify regions in the 3D image. In this paper, we
+ particularly focus on a powerful class of morphology-based
+ filters called levellings to transform the 3D
+ histogram-image to identify clusters. We also show that our
+ method gives better results than those of
+ state-of-the-art methods.}
+}
+
+@Article{ geraud.04.jasp,
+ author = {{\relax Th}ierry G\'eraud and Jean-Baptiste Mouret},
+ title = {Fast road network extraction in satellite images using
+ mathematical morphology and {M}arkov random fields},
+ journal = {EURASIP Journal on Applied Signal Processing},
+ year = 2004,
+ number = 16,
+ volume = 2004,
+ pages = {2503--2514},
+ month = nov,
+ note = {Special issue on Nonlinear Signal and Image Processing -
+ Part II},
+ project = {Image},
+ doi = {http://doi.acm.org/10.1155/S1110865704409093},
+ urllrde = {200409-JASP},
+ abstract = {This paper presents a fast method for road network
+ extraction in satellite images. It can be seen as a
+ transposition of the segmentation scheme ``watershed
+ transform + region adjacency graph + Markov random fields''
+ to the extraction of curvilinear objects. Many road
+ extractors can be found in the literature which are
+ composed of two stages. The first one acts like a filter
+ that can decide from a local analysis, at every image
+ point, if there is a road or not. The second stage aims at
+ obtaining the road network structure. In the method we
+ propose, we rely on a ``potential'' image, that is,
+ unstructured image data that can be derived from any road
+ extractor filter. In such a potential image, the value
+ assigned to a point is a measure of its likelihood to be
+ located in the middle of a road. A filtering step applied
+ on the potential image relies on the area closing operator
+ followed by the watershed transform to obtain a connected
+ line which encloses the road network. Then a graph
+ describing adjacency relationships between watershed lines
+ is built. Defining Markov random fields upon this graph,
+ associated with an energetic model of road networks, leads
+ to the expression of road network extraction as a global
+ energy minimization problem. This method can easily be
+ adapted to other image processing fields where the
+ recognition of curvilinear structures is involved.}
+}
+
+@InProceedings{ geraud.05.ismm,
+ author = {{\relax Th}ierry G\'eraud},
+ title = {Ruminations on {T}arjan's {U}nion-{F}ind algorithm and
+ connected operators},
+ booktitle = ismm05,
+ year = 2005,
+ address = {Paris, France},
+ month = apr,
+ pages = {105--116},
+ publisher = {Springer},
+ series = {Computational Imaging and Vision},
+ volume = 30,
+ project = {Image},
+ urllrde = {200504-ISMM},
+ abstract = {This paper presents a comprehensive and general form of
+ Tarjan's union-find algorithm dedicated to connected
+ operators. An interesting feature of this form is to
+ introduce the notion of separated domains. The properties
+ of this form and its flexibility are discussed and
+ highlighted with examples. In particular, we give clues to
+ handle correctly the constraint of domain-disjointness
+ preservation and, as a consequence, we show how we can rely
+ on ``union-find'' to obtain algorithms for self-dual
+ filters approaches and levelings with a marker function.}
+}
+
+@InProceedings{ geraud.08.mpool,
+ author = {{\relax Th}ierry G\'eraud and Roland Levillain},
+ title = {Semantics-Driven Genericity: A Sequel to the Static {C++}
+ Object-Oriented Programming Paradigm ({SCOOP 2})},
+ booktitle = {Proceedings of the 6th International Workshop on
+ Multiparadigm Programming with Object-Oriented Languages
+ (MPOOL)},
+ year = 2008,
+ address = {Paphos, Cyprus},
+ month = jul,
+ project = {Olena},
+ urllrde = {200807-MPOOL},
+ abstract = {Classical (unbounded) genericity in \Cxx{}03 defines the
+ interactions between generic data types and algorithms in
+ terms of concepts. Concepts define the requirements over a
+ type (or a parameter) by expressing constraints on its
+ methods and dependent types (typedefs). The upcoming
+ \Cxx{}0x standard will promote concepts from abstract
+ entities (not directly enforced by the tools) to language
+ constructs, enabling compilers and tools to perform
+ additional checks on generic constructs as well as enabling
+ new features (e.g., concept-based overloading). Most modern
+ languages support this notion of signature on generic
+ types. However, generic types built on other types and
+ relying on concepts to both ensure type conformance and
+ drive code specialization, restrain the interface and the
+ implementation of the newly created type: specific methods
+ and associated types not mentioned in the concept will not
+ be part of the new type. The paradigm of concept-based
+ genericity lacks the required semantics to transform types
+ while retaining or adapting their intrinsic capabilities.
+ We present a new form of semantically-enriched genericity
+ allowing static generic type transformations through a
+ simple form of type introspection based on type metadata
+ called properties. This approach relies on a new Static
+ \Cxx Object-Oriented Programming (SCOOP) paradigm, and is
+ adapted to the creation of generic and efficient libraries,
+ especially in the field of scientific computing. Our
+ proposal uses a metaprogramming facility built into a \Cxx
+ library called Static, and does not require any language
+ extension or additional processing (preprocessor,
+ transformation tool).}
+}
+
+@InCollection{ geraud.10.book,
+ author = {{\relax Th}ierry G\'eraud and Hugues Talbot and Marc Van
+ Droogenbroeck},
+ title = {Algorithms for Mathematical Morphology},
+ booktitle = {Mathematical Morphology---From Theory to Applications},
+ pages = {323--353},
+ publisher = {Wiley-ISTE},
+ year = 2010,
+ editor = {Laurent Najman and Hugues Talbot},
+ isbn = {978-1-84821-215-2},
+ month = jul,
+ url = {http://eu.wiley.com/WileyCDA/WileyTitle/productCd-1848212151.html},
+ urllrde = {201007-BOOK}
+}
+
+@InCollection{ geraud.10.livre,
+ author = {{\relax Th}ierry G\'eraud and Hugues Talbot and Marc Van
+ Droogenbroeck},
+ title = {Morphologie et algorithmes},
+ booktitle = {Morphologie math\'ematique 2~: estimation, choix et mise
+ en {\oe}uvre},
+ pages = {151--180},
+ publisher = {Herm\`es Science Publications},
+ year = 2010,
+ series = {IC2 signal et image},
+ chapter = 6,
+ editor = {Laurent Najman and Hugues Talbot},
+ month = sep,
+ urllrde = {201009-LIVRE}
+}
+
+@InProceedings{ geraud.99.cimaf,
+ author = {{\relax Th}ierry G\'eraud and Isabelle Bloch and Henri
+ Ma{\^\i}tre},
+ title = {Atlas-guided recognition of cerebral structures in {MRI}
+ using fusion of fuzzy structural information},
+ booktitle = {Proceedings of the CIMAF Symposium on Artificial
+ Intelligence},
+ pages = {99--106},
+ year = 1999,
+ address = {La Havana, Cuba},
+ note = {EPITA as current address.},
+ project = {Image},
+ urllrde = {199900-CIMAF}
+}
+
+@InProceedings{ geraud.99.gretsi,
+ author = {{\relax Th}ierry G\'eraud and Yoann Fabre and Dimitri
+ Papadopoulos-Orfanos and Jean-Fran\c{c}ois Mangin},
+ title = {Vers une r\'eutilisabilit\'e totale des algorithmes de
+ traitement d'images},
+ booktitle = {Proceedings of the 17th Symposium on Signal and Image
+ Processing (GRETSI)},
+ category = {national},
+ pages = {331--334},
+ volume = 2,
+ year = 1999,
+ address = {Vannes, France},
+ month = sep,
+ note = {In French},
+ project = {Olena},
+ urllrde = {199909-Gretsi},
+ abstract = {This article presents the evolution of programming
+ techniques for image processing algorithms and discusses
+ the limits of the reusability of these algorithms. In
+ particular, we show that in C++ an algorithm can be
+ written in a general form, independent of both the type of
+ the data and the type of the data structures to which it
+ can be applied. Total reusability of algorithms can
+ therefore be obtained; better still, writing them is more
+ natural and does not introduce a significant run-time
+ overhead.}
+}
+
+@Misc{ gournet.05.sud,
+ author = {Alexandre Borghi and Valentin David and Akim Demaille and
+ Olivier Gournet},
+ title = {Implementing attributes in {SDF}},
+ year = 2005,
+ address = {Utrecht {U}niversity, {N}etherlands},
+ note = {Communication to Stratego Users Day 2005},
+ month = may,
+ project = {Transformers},
+ urllrde = {200505-SUD-ag},
+ abstract = {Attribute Grammars (AGs) provide a very convenient means
+ to bind semantics to syntax. They enjoy an extensive
+ bibliography and are used in several types of applications.
+ Yet, to our knowledge, their use to disambiguate is novel.
+ We present our implementation of an evaluator of attributes
+ for ambiguous AGs, tailored to the disambiguation of
+ ambiguous parse trees. This paper focuses on its
+ implementation, which heavily relies on Stratego/XT; the
+ latter is also used as the language to express the
+ attribute rules. A companion paper presents the
+ disambiguation process in detail (200505-SUD-disamb).}
+}
+
+@InProceedings{ grosicki.04.icc,
+ author = {Emmanuel Grosicki and Karim Abed-Meraim and R\'eda Dehak},
+ title = {A novel method to fight the non line of sight error in
+ {AOA} measurements for mobile location},
+ booktitle = {Proceedings of the IEEE International Conference on
+ Communications (ICC)},
+ year = 2004,
+ volume = 5,
+ pages = {2794--2798},
+ address = {Paris, France},
+ month = jun,
+ project = {Image},
+ urllrde = {200406-ICC},
+ abstract = {In this contribution, a mobile location method is provided
+ using measurements from two different Base-Stations.
+ Although computationally simple, the method is based on
+ trilateration and takes into account error measurements
+ caused by Non-Line-Of-Sight (NLOS) propagation and the
+ near-far effect. The new method attributes an index of
+ confidence to each measure, in order to allow the mobile to
+ select the two most reliable measures and not to use all
+ measures equally.}
+}
+
+@InProceedings{ guirado.05.pdmc,
+ author = {Guillaume Guirado and {\relax Th}omas Herault and Richard
+ Lassaigne and Sylvain Peyronnet},
+ title = {Distribution, approximation and probabilistic model
+ checking},
+ booktitle = {Proceedings of the 4th international workshop on Parallel
+ and Distributed Model Checking (PDMC)},
+ year = 2005,
+ project = {APMC},
+ urllrde = {200507-Pdmc},
+ abstract = {APMC is a model checker dedicated to the quantitative
+ verification of fully probabilistic systems against LTL
+ formulas. Using a Monte-Carlo method in order to
+ efficiently approximate the verification of probabilistic
+ specifications, it could be used naturally in a distributed
+ framework. We present here the tool and its distribution
+ scheme, together with extensive performance evaluation,
+ showing the scalability of the method, even on clusters
+ containing 500+ heterogeneous workstations.}
+}
+
+@InProceedings{ hamez.07.pohll,
+ author = {Alexandre Hamez and Fabrice Kordon and Yann Thierry-Mieg},
+ title = {{libDMC}: a library to Operate Efficient Distributed Model
+ Checking},
+ booktitle = {Workshop on Performance Optimization for High-Level
+ Languages and Libraries --- associated to IPDPS'2007},
+ year = 2007,
+ project = {Verification},
+ urllrde = {200703-POHLL},
+ abstract = {Model checking is a formal verification technique that
+ makes it possible to prove automatically that a system's
+ behavior is correct. However, it is often prohibitively expensive in
+ time and memory complexity, due to the so-called state
+ space explosion problem. We present a generic
+ multi-threaded and distributed infrastructure library
+ designed to allow distribution of the model checking
+ procedure over a cluster of machines. This library is
+ generic, and is designed to allow encapsulation of any
+ model checker in order to make it distributed. Performance
+ evaluations are reported and clearly show the advantages of
+ multi-threading to keep processors busy while waiting for the
+ network, with linear speedup over the number of
+ processors.}
+}
+
+@InProceedings{ hamez.08.atpn,
+ author = {Alexandre Hamez and Yann Thierry-Mieg and Fabrice Kordon},
+ title = {Hierarchical Set Decision Diagrams and Automatic
+ Saturation},
+ booktitle = {Petri Nets and Other Models of Concurrency --ICATPN 2008},
+ year = 2008,
+ project = {Verification},
+ urllrde = {200806-ATPN},
+ abstract = {Shared decision diagram representations of a state-space
+ have been shown to provide efficient solutions for
+ model-checking of large systems. However, decision diagram
+ manipulation is tricky, as the construction procedure is
+ liable to produce intractable intermediate structures
+ (a.k.a. peak effect). The definition of the so-called
+ saturation method has empirically been shown to mostly
+ avoid this peak effect, and allows verification of much
+ larger systems. However, applying this algorithm currently
+ requires deep knowledge of the decision diagram
+ data-structures, of the model or formalism manipulated, and
+ a level of interaction that is not offered by the API of
+ public DD packages.
+
+ Hierarchical Set Decision Diagrams (SDD) are decision
+ diagrams in which arcs of the structure are labeled with
+ sets, themselves stored as SDD. This data structure offers
+ an elegant and very efficient way of encoding structured
+ specifications using decision diagram technology. It also
+ offers, through the concept of inductive homomorphisms,
+ unprecedented freedom to the user when defining the
+ transition relation. Finally, with very limited user input,
+ the SDD library is able to optimize evaluation of a
+ transition relation to produce a saturation effect at
+ runtime. We further show that using recursive folding, SDD
+ are able to offer solutions in logarithmic complexity with
+ respect to other DD. We conclude with performance results
+ on well-known examples.}
+}
+
+@Article{ hamez.09.fi,
+ abstract = {Shared decision diagram representations of a state-space
+ provide efficient solutions for model-checking of large
+ systems. However, decision diagram manipulation is tricky,
+ as the construction procedure is liable to produce
+ intractable intermediate structures (a.k.a. peak effect).
+ The definition of the so-called saturation method has
+ empirically been shown to mostly avoid this peak effect,
+ and allows verification of much larger systems. However,
+ applying this algorithm currently requires deep knowledge
+ of the decision diagram data-structures.
+
+ Hierarchical Set Decision Diagrams (SDD) are decision
+ diagrams in which arcs of the structure are labeled with
+ sets, themselves stored as SDD. This data structure offers
+ an elegant and very efficient way of encoding structured
+ specifications using decision diagram technology. It also
+ offers, through the concept of inductive homomorphisms,
+ flexibility to a user defining a transition relation. We
+ show in this paper how, with very limited user input, the
+ SDD library is able to optimize evaluation of a transition
+ relation to produce a saturation effect at runtime.
+
+ We build as an example an SDD model-checker for a
+ compositional formalism: Instantiable Petri Nets (IPN). IPN
+ define a \emph{type} as an abstract contract. Labeled P/T
+ nets are used as an elementary type. A composite type is
+ defined to hierarchically contain instances (of elementary
+ or composite type). To compose behaviors, IPN use classic
+ label synchronization semantics from process calculi.
+
+ With a particular recursive folding, SDD are able to offer
+ solutions for symmetric systems in logarithmic complexity
+ with respect to other DD. Even in less regular cases, the
+ use of hierarchy in the specification is shown to be well
+ supported by SDD. Experiments and performance results are
+ reported on some well-known examples.},
+ author = {Alexandre Hamez and Yann Thierry-Mieg and Fabrice Kordon},
+ date-added = {2009-05-06 16:39:07 +0200},
+ date-modified = {2009-05-06 16:48:10 +0200},
+ journal = {Fundamenta Informaticae},
+ title = {Building Efficient Model Checkers Using Hierarchical Set
+ Decision Diagrams and Automatic Saturation},
+ year = 2009,
+ urllrde = {2009-FI}
+}
+
+@InProceedings{ hemon.08.sagt,
+ author = {S\'ebastien H\'emon and Michel de Rougemont and Miklos
+ Santha},
+ title = {Approximate {N}ash Equilibria for Multi-Player Games},
+ titre = {\'Equilibres de Nash approch\'es dans les jeux
+ multi-joueurs},
+ booktitle = {1st International Symposium on Algorithmic Game Theory},
+ year = 2008,
+ address = {Paderborn, Germany},
+ month = apr,
+ urllrde = {2008-01-18-SAGT},
+ resume = {Nash equilibria are key positions of any game admitting a
+ finite representation: indeed, whatever the number of
+ players and strategies, such a position always exists. Once
+ reached, it dissuades every player from deviating from his
+ current strategy, hence the notion of equilibrium. Many
+ problems rely on them, but effectively computing an
+ equilibrium remains a difficult problem. Indeed, the best
+ known algorithm for computing an equilibrium in the general
+ case is exponential in the number of strategies.
+
+ We present here the notion of approximate equilibria, and
+ give results about their computation. We show that no
+ algorithm can compute an equilibrium, even an approximate
+ one, without using, for at least one player, a logarithmic
+ number of strategies. We show how to compute an approximate
+ equilibrium in sub-exponential time
+ $n^{\mathcal{O}(\frac{\ln n}{\varepsilon^2})}$, which
+ currently remains, for the general case, the best known
+ worst-case complexity.
+
+ Finally, we present an inductive approach that transfers an
+ approximation of a position of a two-player game into an
+ approximation for an $r$-player game, which yields novel
+ results in the field.}
+}
+
+@InProceedings{ herault.06.qest,
+ author = {Thomas H\'erault and Richard Lassaigne and Sylvain
+ Peyronnet},
+ title = {{APMC 3.0}: Approximate verification of Discrete and
+ Continuous Time Markov Chains},
+ booktitle = {Proceedings of Qest 2006},
+ year = 2006,
+ pages = {129--130},
+ project = {APMC},
+ urllrde = {200606-Qest},
+ abstract = {In this paper, we give a brief overview of APMC
+ (Approximate Probabilistic Model Checker). APMC is a model
+ checker that implements approximate probabilistic
+ verification of probabilistic systems. It is based on the
+ Monte-Carlo method and the theory of randomized
+ approximation schemes, and it can verify extremely large
+ models without explicitly representing the global
+ transition system. To avoid the state-space explosion
+ phenomenon, APMC gives an accurate approximation of the
+ satisfaction probability of the property instead of the
+ exact value, but using only a very small amount of memory.
+ The version of APMC we present in this paper can now
+ handle both discrete- and continuous-time probabilistic
+ systems efficiently.}
+}
+
+@InProceedings{ kenny.08.odyssey,
+ author = {Patrick Kenny and Najim Dehak and R\'eda Dehak and Vishwa
+ Gupta and Pierre Dumouchel},
+ title = {The Role of Speaker Factors in the {NIST} Extended Data
+ Task},
+ booktitle = {Proceedings of the Speaker and Language Recognition
+ Workshop (IEEE-Odyssey 2008)},
+ year = 2008,
+ address = {Stellenbosch, South Africa},
+ month = jan,
+ urllrde = {200709-ODYSSEY-C},
+ abstract = {We tested factor analysis models having various numbers of
+ speaker factors on the core condition and the extended data
+ condition of the 2006 NIST speaker recognition evaluation.
+ In order to ensure strict disjointness between training and
+ test sets, the factor analysis models were trained without
+ using any of the data made available for the 2005
+ evaluation. The factor analysis training set consisted
+ primarily of Switchboard data and so was to some degree
+ mismatched with the 2006 test data (drawn from the Mixer
+ collection). Consequently, our initial results were not as
+ good as those submitted for the 2006 evaluation. However we
+ found that we could compensate for this by a simple
+ modification to our score normalization strategy, namely by
+ using 1000 z-norm utterances in zt-norm. Our purpose in
+ varying the number of speaker factors was to evaluate the
+ eigenvoice MAP and classical MAP components of the
+ inter-speaker variability model in factor analysis. We
+ found that on the core condition (i.e. 2--3 minutes of
+ enrollment data), only the eigenvoice MAP component plays a
+ useful role. On the other hand, on the extended data
+ condition (i.e. 15--20 minutes of enrollment data) both the
+ classical MAP component and the eigenvoice component proved
+ to be useful provided that the number of speaker factors
+ was limited. Our best result on the extended data condition
+ (all trials) was an equal error rate of 2.2\% and a
+ detection cost of 0.011. }
+}
+
+@Article{ laplante.07.tocl,
+ author = {Sophie Laplante and Richard Lassaigne and Fr\'ed\'eric
+ Magniez and Sylvain Peyronnet and Michel de Rougemont},
+ title = {Probabilistic abstraction for model checking: an approach
+ based on property testing},
+ journal = {ACM Transactions on Computational Logic},
+ year = 2007,
+ project = {APMC},
+ month = aug,
+ volume = 8,
+ number = 4,
+ urllrde = {2006XX-TOCL},
+ abstract = {The goal of model checking is to verify the correctness of
+ a given program, on all its inputs. The main obstacle, in
+ many cases, is the intractably large size of the program's
+ transition system. Property testing is a randomized method
+ to verify whether some fixed property holds on individual
+ inputs, by looking at a small random part of that input. We
+ join the strengths of both approaches by introducing a new
+ notion of probabilistic abstraction, and by extending the
+ framework of model checking to include the use of these
+ abstractions. Our abstractions map transition systems
+ associated with large graphs to small transition systems
+ associated with small random subgraphs. This reduces the
+ original transition system to a family of small, even
+ constant-size, transition systems. We prove that with high
+ probability, ``sufficiently'' incorrect programs will be
+ rejected ($\varepsilon$-robustness). We also prove that under a
+ certain condition (exactness), correct programs will never
+ be rejected (soundness). Our work applies to programs for
+ graph properties such as bipartiteness, $k$-colorability,
+ or any $\exists\forall$ first order graph properties. Our
+ main contribution is to show how to apply the ideas of
+ property testing to syntactic programs for such properties.
+ We give a concrete example of an abstraction for a program
+ for bipartiteness. Finally, we show that the relaxation of
+ the test alone does not yield transition systems small
+ enough to use the standard model checking method. More
+ specifically, we prove, using methods from communication
+ complexity, that the OBDD size remains exponential for
+ approximate bipartiteness.}
+}
+
+@InProceedings{ lassaigne.05.wollic,
+ author = {Richard Lassaigne and Sylvain Peyronnet},
+ title = {Probabilistic verification and approximation},
+ booktitle = {Proceedings of 12th Workshop on Logic, Language,
+ Information and Computation (Wollic)},
+ year = 2005,
+ series = {Electronic Notes in Theoretical Computer Science},
+ volume = 143,
+ pages = {101--114},
+ project = {APMC},
+ urllrde = {200507-Wollic},
+ abstract = {Model checking is an algorithmic method for automatically
+ verifying whether a system represented as a Kripke model
+ satisfies a given specification.
+ Specifications are usually expressed by formulas of
+ temporal logic. The first objective of this paper is to
+ give an overview of methods able to verify probabilistic
+ systems. Models of such systems are labelled discrete time
+ Markov chains and specifications are expressed in
+ extensions of temporal logic by probabilistic operators.
+ The second objective is to focus on complexity of these
+ methods and to answer the question: can probabilistic
+ verification be efficiently approximated? In general, the
+ answer is negative. However, in many applications, the
+ specification formulas can be expressed in some positive
+ fragment of linear time temporal logic. In this paper, we
+ show how some simple randomized approximation algorithms
+ can improve the efficiency of the verification of such
+ probabilistic specifications.}
+}
+
+@InProceedings{ lazzara.11.icdar,
+ author = {Guillaume Lazzara and Roland Levillain and {\relax
+ Th}ierry G\'eraud and Yann Jacquelet and Julien Marquegnies
+ and Arthur Cr\'epin-Leblond},
+ title = {The {SCRIBO} Module of the {Olena} Platform: a Free
+ Software Framework for Document Image Analysis},
+ booktitle = {Proceedings of the 11th International Conference on
+ Document Analysis and Recognition (ICDAR)},
+ year = 2011,
+ address = {Beijing, China},
+ month = sep,
+ organization = {International Association for Pattern Recognition (IAPR)},
+ project = {Olena},
+ urllrde = {201109-ICDAR},
+ abstract = {Electronic documents are becoming more and more usable thanks
+ to better and more affordable network, storage and
+ computational facilities. But in order to benefit from
+ computer-aided document management, paper documents must be
+ digitized and analyzed. This task may be challenging at
+ several levels. Data may be of multiple types thus
+ requiring different adapted processing chains. The tools to
+ be developed should also take into account the needs and
+ knowledge of users, ranging from a simple graphical
+ application to a complete programming framework. Finally,
+ the data sets to process may be large. In this paper, we
+ expose a set of features that a Document Image Analysis
+ framework should provide to handle the previous issues. In
+ particular, a good strategy to address both flexibility and
+ efficiency issues is the Generic Programming (GP) paradigm.
+ These ideas are implemented as an open source module,
+ SCRIBO, built on top of Olena, a generic and efficient
+ image processing platform. Our solution features services
+ such as preprocessing filters, text detection, page
+ segmentation and document reconstruction (as XML, PDF or
+ HTML documents). This framework, composed of reusable
+ software components, can be used to create full-fledged
+ graphical applications, small utilities, or processing
+ chains to be integrated into third-party projects.},
+ keywords = {Document Image Analysis, Software Design, Reusability,
+ Free Software}
+}
+
+@InProceedings{ le-quoc.07.ntms,
+ author = {Cuong Le Quoc and Patrick Bellot and Akim Demaille},
+ title = {On the security of quantum networks: a proposal framework
+ and its capacity},
+ booktitle = {Proceedings of the 2007 International Conference on New
+ Technologies, Mobility and Security (NTMS'07)},
+ year = 2007,
+ address = {Paris, France},
+ month = may,
+ urllrde = {200705-NTMS},
+ abstract = {In large Quantum Key Distribution (QKD)-based networks,
+ intermediate nodes are necessary because of the short
+ length of QKD links. Such nodes tend to be used more than
+ in classical networks. A realistic assumption is that
+ eavesdropping operations occur in these nodes without the
+ knowledge of the legitimate network participants. We
+ develop a QKD-based network framework. We present a
+ percolation-based approach to discuss the conditions for
+ secret key transmission with extremely high security. We
+ also propose an adaptive stochastic routing algorithm that
+ helps protect keys from reasonable eavesdroppers in a dense
+ QKD network. We
+ show that under some assumptions, one could prevent
+ eavesdroppers from sniffing the secrets with an arbitrarily
+ large probability.}
+}
+
+@InProceedings{ le-quoc.07.rivf,
+ author = {Cuong Le Quoc and Patrick Bellot and Akim Demaille},
+ title = {Stochastic routing in large grid-shaped quantum networks},
+ booktitle = {Proceedings of the Fifth International Conference on
+ Computer Sciences, Research, Innovation and Vision for the
+ Future (RIVF'07)},
+ year = 2007,
+ address = {Hanoi, Vietnam},
+ month = mar,
+ isbn = {1-4244-0695-1},
+ urllrde = {200703-RIVF},
+ abstract = {This paper investigates the problem of secret key
+ transmissions for an arbitrary Alice-Bob pair in Quantum
+ Key Distribution-based networks. We develop a realistic
+ QKD-based network framework and we show that the key
+ transmission problem on such a framework can be considered
+ as a variant of the classical percolation problem. We also
+ present an adaptive stochastic routing algorithm to protect
+ from inevitable eavesdroppers. Simulations were carried out
+ not only to validate our approach, but also to compute
+ critical parameters ensuring security. These results show
+ that large quantum networks with eavesdroppers do provide
+ security.},
+ keywords = {Quantum Key Distribution, QKD network, percolation theory,
+ stochastic routing}
+}
+
+@InProceedings{ le-quoc.08.ispec,
+ author = {Cuong Le Quoc and Patrick Bellot and Akim Demaille},
+ title = {Towards the World-Wide Quantum Network},
+ booktitle = {Proceedings of the 4th Information Security Practice and
+ Experience Conference (ISPEC'08)},
+ year = 2008,
+ address = {Sydney, Australia},
+ month = apr,
+ urllrde = {200804-ISPEC},
+ abstract = {Quantum Key Distribution (QKD) networks are of much
+ interest due to their capacity of providing extremely high
+ security keys to network participants. Most QKD network
+ studies so far focus on trusted models where all the
+ network nodes are assumed to be perfectly secured. This
+ restricts QKD networks to be small. In this paper, we first
+ develop a novel model dedicated to large-scale QKD
+ networks, some of whose nodes could be eavesdropped
+ secretly. Then, we investigate the key transmission problem
+ in the new model by an approach based on percolation theory
+ and stochastic routing. Analyses show that under computable
+ conditions large-scale QKD networks could protect secret
+ keys with an extremely high probability. Simulations
+ validate our results.},
+ keywords = {Quantum Key Distribution, QKD network, percolation theory,
+ stochastic routing}
+}
+
+@TechReport{ lefebvre.04.tr,
+ author = {Sylvain Lefebvre and J\'er\^ome Darbon and Fabrice Neyret},
+ title = {Unified texture management for arbitrary meshes},
+ institution = {INRIA-Rhone-Alpes},
+ year = 2004,
+ number = {RR-5210},
+ address = {France},
+ month = may,
+ project = {Image},
+ urllrde = {200405-RRinria},
+ abstract = {Video games and simulators commonly use very detailed
+ textures, whose cumulative size is often larger than the
+ GPU memory. Textures may be loaded progressively, but
+ dynamically loading and transferring this large amount of
+ data in GPU memory results in loading delays and poor
+ performance. Therefore, managing texture memory has become
+ an important issue. While this problem has been (partly)
+ addressed early for the specific case of terrain rendering,
+ there is no generic texture management system for arbitrary
+ meshes. We propose such a system, implemented on today's
+ GPUs, which unifies classical solutions aimed at reducing
+ memory transfer: progressive loading, texture compression,
+ and caching strategies. For this, we introduce a new
+ algorithm -- running on GPU -- to solve the major
+ difficulty of detecting which parts of the texture are
+ required for rendering. Our system is based on three
+ components manipulating a tile pool which stores texture
+ data in GPU memory. First, the Texture Load Map determines
+ at every frame the appropriate list of texture tiles (i.e.
+ location and MIP-map level) to render from the current
+ viewpoint. Second, the Texture Cache manages the tile pool.
+ Finally, the Texture Producer loads and decodes required
+ texture tiles asynchronously in the tile pool. Decoding of
+ compressed texture data is implemented on GPU to minimize
+ texture transfer. The Texture Producer can also generate
+ procedural textures. Our system is transparent to the user,
+ and the only parameter that must be supplied at runtime is
+ the current viewpoint. No modifications of the mesh are
+ required. We demonstrate our system on large scenes
+ displayed in real time. We show that it achieves
+ interactive frame rates even in low-memory low-bandwidth
+ situations.}
+}
+
+@InProceedings{ lesage.06.isvc,
+ author = {David Lesage and J\'er\^ome Darbon and Ceyhun Burak Akg\"ul},
+ title = {An Efficient Algorithm for Connected Attribute Thinnings
+ and Thickenings},
+ booktitle = {Proceedings of the second International Conference on
+ Visual Computing},
+ year = 2006,
+ address = {Lake Tahoe, Nevada, USA},
+ month = nov,
+ project = {Image},
+ pages = {393--404},
+ volume = 4292,
+ series = {Lecture Notes in Computer Science Series},
+ publisher = {Springer-Verlag},
+ urllrde = {200611-ISVC},
+ abstract = {Connected attribute filters are anti-extensive
+ morphological operators widely used for their ability of
+ simplifying the image without moving its contours. In this
+ paper, we present a fast, versatile and easy-to-implement
+ algorithm for grayscale connected attribute thinnings and
+ thickenings, a subclass of connected filters for the wide
+ range of non-increasing attributes. We show that our
+ algorithm consumes less memory and is computationally more
+ efficient than other available methods on natural images.}
+}
+
+@Misc{ levillain.05.olenaposter,
+ author = {Roland Levillain},
+ title = {{O}lena {P}roject poster},
+ month = oct,
+ year = 2005,
+ urllrde = {200510-OlenaPoster}
+}
+
+@Misc{ levillain.05.tigerposter,
+ author = {Roland Levillain},
+ title = {{T}iger {P}roject poster},
+ month = oct,
+ year = 2005,
+ urllrde = {200510-TigerPoster}
+}
+
+@InProceedings{ levillain.09.ismm,
+ author = {Roland Levillain and {\relax Th}ierry G\'eraud and Laurent
+ Najman},
+ title = {{Milena}: Write Generic Morphological Algorithms Once, Run
+ on Many Kinds of Images},
+ booktitle = {Mathematical Morphology and Its Application to Signal and
+ Image Processing -- Proceedings of the Ninth International
+ Symposium on Mathematical Morphology (ISMM)},
+ pages = {295--306},
+ year = 2009,
+ editor = {Michael H. F. Wilkinson and Jos B. T. M. Roerdink},
+ series = {Lecture Notes in Computer Science},
+ address = {Groningen, The Netherlands},
+ month = aug,
+ publisher = {Springer Berlin / Heidelberg},
+ volume = 5720,
+ project = {Olena},
+ urllrde = {200908-ISMM},
+ abstract = {We present a programming framework for discrete
+ mathematical morphology centered on the concept of
+ genericity. We show that formal definitions of
+ morphological algorithms can be translated into actual
+ code, usable on virtually any kind of compatible images,
+ provided a general definition of the concept of image is
+ given. This work is implemented in Milena, a generic,
+ efficient, and user-friendly image processing library.},
+ keywords = {mathematical morphology, image processing operator,
+ genericity, programming}
+}
+
+@InProceedings{ levillain.10.icip,
+ author = {Roland Levillain and {\relax Th}ierry G\'eraud and Laurent
+ Najman},
+ title = {Why and How to Design a Generic and Efficient Image
+ Processing Framework: The Case of the {Milena} Library},
+ booktitle = {Proceedings of the IEEE International Conference on Image
+ Processing (ICIP)},
+ pages = {1941--1944},
+ year = 2010,
+ address = {Hong Kong},
+ month = sep,
+ project = {Olena},
+ urllrde = {201009-ICIP},
+ abstract = {Most image processing frameworks are not generic enough to
+ provide true reusability of data structures and algorithms.
+ In fact, genericity allows users to write and experiment
+ with virtually any method on any compatible input(s). In
+ this paper, we advocate the use of generic programming in
+ the design of image processing software, while preserving
+ performance close to that of dedicated code. The implementation of
+ our proposal, Milena, a generic and efficient library,
+ illustrates the benefits of our approach.},
+ keywords = {Genericity, Image Processing, Software Design,
+ Reusability, Efficiency}
+}
+
+@InProceedings{ levillain.10.wadgmm,
+ author = {Roland Levillain and {\relax Th}ierry G\'eraud and Laurent
+ Najman},
+ title = {Writing Reusable Digital Geometry Algorithms in a Generic
+ Image Processing Framework},
+ booktitle = {Proceedings of the Workshop on Applications of Digital
+ Geometry and Mathematical Morphology (WADGMM)},
+ pages = {96--100},
+ year = 2010,
+ address = {Istanbul, Turkey},
+ month = aug,
+ url = {http://mdigest.jrc.ec.europa.eu/wadgmm2010/},
+ project = {Olena},
+ urllrde = {201008-WADGMM},
+ abstract = {Digital Geometry software should reflect the generality of
+ the underlying mathematics: mapping the latter to the
+ former requires genericity. By designing generic solutions,
+ one can effectively reuse digital geometry data structures
+ and algorithms. We propose an image processing framework
+ centered on the Generic Programming paradigm in which an
+ algorithm on paper can be turned into a single piece of code,
+ written once and usable with various input types. This
+ approach enables users to design and implement new methods
+ at a lower cost, try cross-domain experiments and help
+ generalize results.},
+ keywords = {Generic Programming, Interface, Skeleton, Complex}
+}
+
+@InProceedings{ levillain.11.gretsi,
+ author = {Roland Levillain and {\relax Th}ierry G\'eraud and Laurent
+ Najman},
+ title = {Une approche g\'en\'erique du logiciel pour le traitement
+ d'images pr\'eservant les performances},
+ booktitle = {Proceedings of the 23rd Symposium on Signal and Image
+ Processing (GRETSI)},
+ category = {national},
+ year = 2011,
+ address = {Bordeaux, France},
+ month = sep,
+ note = {In French.},
+ project = {Olena},
+ urllrde = {201109-GRETSI},
+ abstract = {More and more modern software tools for image processing
+ are designed with code genericity in mind, that is, the
+ ability to write reusable algorithms compatible with many
+ input types. However, this design choice is often made at
+ the expense of the performance of the executed code. Given
+ the wide variety of existing image types and the need for
+ fast implementations, genericity and performance appear to
+ be essential qualities of image processing software. This
+ article presents a performance-preserving approach within a
+ generic software framework that takes advantage of the
+ characteristics of the data types in use. Thanks to these
+ characteristics, it is possible to write variants of
+ generic algorithms offering a trade-off between genericity
+ and performance. These alternatives are able to preserve
+ part of the original generic aspects while providing
+ substantial gains at run time. According to our
+ experiments, these generic optimizations deliver
+ performance that bears comparison with dedicated code,
+ sometimes even outperforming hand-optimized routines.}
+}
+
+@InProceedings{ linard.10.acsd,
+ author = {Alban Linard and Emmanuel Paviot-Adet and Fabrice Kordon
+ and Didier Buchs and Samuel Charron},
+ title = {{polyDD}: Towards a Framework Generalizing Decision
+ Diagrams},
+ booktitle = {Proceedings of the 10th International Conference on
+ Application of Concurrency to System Design (ACSD)},
+ pages = {124--133},
+ year = 2010,
+ address = {Braga, Portugal},
+ month = jun,
+ project = {Verification},
+ urllrde = {201006-ACSD},
+ publisher = {IEEE Computer Society},
+ abstract = {Decision Diagrams are now widely used in model checking as
+ extremely compact representations of state spaces. Many
+ Decision Diagram categories have been developed over the
+ past twenty years based on the same principles. Each one
+ targets a specific domain with its own characteristics.
+ Moreover, each one provides its own definition. This prevents
+ sharing concepts and techniques between these structures.
+ This paper aims to propose a basis for a common Framework
+ for Decision Diagrams. It should help users of this
+ technology to define new Decision Diagram categories thanks
+ to a simple specification mechanism called Controller. This
+ enables the building of efficient Decision Diagrams
+ dedicated to a given problem.}
+}
+
+@InProceedings{ lombardy.03.ciaa,
+ author = {Sylvain Lombardy and Rapha\"el Poss and Yann
+ R\'egis-Gianas and Jacques Sakarovitch},
+ title = {Introducing {V}aucanson},
+ booktitle = {Proceedings of Implementation and Application of Automata,
+ 8th International Conference (CIAA)},
+ pages = {96--107},
+ year = 2003,
+ publisher = {Springer-Verlag},
+ volume = 2759,
+ series = {Lecture Notes in Computer Science Series},
+ address = {Santa Barbara, CA, USA},
+ month = jul,
+ project = {Vaucanson},
+ urllrde = {200307-Ciaa},
+ abstract = {This paper reports on a new software platform dedicated to
+ the computation with automata and transducers, called
+ Vaucanson, the main feature of which is the capacity of
+ dealing with automata whose labels may belong to various
+ algebraic structures. The paper successively shows how
+ Vaucanson makes it possible to program algorithms on
+ automata in a way which is very close to the mathematical
+ expression of the algorithm, describes some features of the
+ Vaucanson platform, including the fact that the very rich
+ data structure used to implement automata does not weigh
+ too much on performance, and finally explains the main
+ issues of the programming design that make it possible to
+ achieve both genericity and efficiency.}
+}
+
+@Article{ lombardy.04.tcs,
+ author = {Sylvain Lombardy and Yann R\'egis-{G}ianas and Jacques
+ Sakarovitch},
+ title = {Introducing {V}aucanson},
+ journal = {Theoretical Computer Science},
+ volume = 328,
+ year = 2004,
+ pages = {77--96},
+ month = nov,
+ project = {Vaucanson},
+ urllrde = {200411-TCS},
+ abstract = {This paper reports on a new software platform called
+ VAUCANSON and dedicated to the computation with automata
+ and transducers. Its main feature is the capacity of
+ dealing with automata whose labels may belong to various
+ algebraic structures. The paper successively describes the
+ main features of the VAUCANSON platform, including the fact
+ that the very rich data structure used to implement
+ automata does not weigh too much on the performance, shows
+ how VAUCANSON makes it possible to program algorithms on
+ automata in a way which is very close to the mathematical
+ expression of the algorithm, and finally explains the main
+ choices of the programming design that make it possible to
+ achieve both genericity and efficiency.}
+}
+
+@InProceedings{ maes.03.dpcool,
+ author = {Francis Maes},
+ title = {Program templates: expression templates applied to program
+ evaluation},
+ booktitle = {Proceedings of the Workshop on Declarative Programming in
+ the Context of Object-Oriented Languages (DP-COOL; in
+ conjunction with PLI)},
+ year = 2003,
+ address = {Uppsala, Sweden},
+ number = {FZJ-ZAM-IB-2003-10},
+ pages = {67--86},
+ editor = {J\"org Striegnitz and Kei Davis},
+ month = aug,
+ series = {John von Neumann Institute for Computing (NIC)},
+ project = {Software},
+ urllrde = {20020619-Seminar-Maes-Report},
+ abstract = {The C++ language provides a two-layer execution model:
+ static execution of meta-programs and dynamic execution of
+ resulting programs. The Expression Templates technique
+ takes advantage of this dual execution model through the
+ construction of C++ types expressing simple arithmetic
+ formulas. Our intent is to extend this technique to a whole
+ programming language. The Tiger language is a small,
+ imperative language with types, variables, arrays, records,
+ flow control structures and nested functions. The first step is
+ to show how to express a Tiger program as a C++ type. The
+ second step concerns operational analysis which is done
+ through the use of meta-programs. Finally an implementation
+ of our Tiger evaluator is proposed. Our technique goes much
+ deeper than the Expression Templates one. It shows how the
+ generative power of C++ meta-programming can be used in
+ order to compile abstract syntax trees of a fully featured
+ programming language.}
+}
+
+@InProceedings{ maes.04.mpool,
+ author = {Francis Maes},
+ title = {Metagene, a {C++} meta-program generation tool},
+ booktitle = {Proceedings of the Workshop on Multiple Paradigm with OO
+ Languages (MPOOL; in conjunction with ECOOP)},
+ year = 2004,
+ address = {Oslo, Norway},
+ month = jun,
+ project = {Software},
+ urllrde = {200406-MPOOL},
+ abstract = {The C++ language offers a two-layer evaluation model.
+ Thus, it is possible to evaluate a program in two steps:
+ the so-called static and dynamic evaluations. Static
+ evaluation is used for reducing the amount of work done at
+ execution-time. Programs executed statically (called
+ metaprograms) are written in C++ through an intensive use
+ of template classes. Due to the complexity of these
+ structures, writing, debugging and maintaining C++
+ meta-programs is a difficult task. Metagene is a program
+ transformation tool which simplifies the development of
+ such programs. Due to the similarities between C++
+ meta-programming and functional programming, the input
+ language of Metagene is an ML language. Given a functional
+ input program, Metagene outputs the corresponding C++
+ meta-program expressed using template classes.}
+}
+
+@InProceedings{ minetto.10.icip,
+ author = {Rodrigo Minetto and Nicolas Thome and Matthieu Cord and
+ Jonathan Fabrizio and Beatriz Marcotegui},
+ title = {SnooperText: A Multiresolution System for Text Detection
+ in Complex Visual Scenes},
+ booktitle = {Proceedings of the IEEE International Conference on Image
+ Processing (ICIP)},
+ pages = {3861--3864},
+ year = 2010,
+ address = {Hong Kong},
+ month = sep,
+ abstract = {Text detection in natural images remains a very
+ challenging task. For instance, in an urban context, the
+ detection is very difficult due to large variations in
+ terms of shape, size, color, orientation, and the image may
+ be blurred or have irregular illumination, etc. In this
+ paper, we describe a robust and accurate multiresolution
+ approach to detect and classify text regions in such
+ scenarios. Based on a generation/validation paradigm, we
+ first segment images to detect character regions with a
+ multiresolution algorithm able to manage large character
+ size variations. The segmented regions are then filtered
+ out using shape-based classification, and neighboring
+ characters are merged to generate text hypotheses. A
+ validation step computes a region signature based on
+ texture analysis to reject false positives. We evaluate our
+ algorithm on two challenging databases, achieving very good
+ results.},
+ keywords = {Text detection, multiresolution, image segmentation,
+ machine learning}
+}
+
+@InProceedings{ perrot.06.nist,
+ author = {Patrick Perrot and R\'eda Dehak and G\'erard Chollet},
+ title = {{ENST-IRCGN} System Description},
+ booktitle = {NIST SRE'06 Workshop: speaker recognition evaluation
+ campaign},
+ year = 2006,
+ address = {San Juan, Puerto Rico},
+ month = jun,
+ urllrde = {200606-NIST-B}
+}
+
+@Misc{ pouillard.05.sud,
+ author = {Akim Demaille and {\relax Th}omas Largillier and Nicolas
+ Pouillard},
+ title = {{ESDF}: A proposal for a more flexible {SDF} handling},
+ note = {Communication to Stratego Users Day 2005},
+ year = 2005,
+ address = {Utrecht {U}niversity, {N}etherlands},
+ month = may,
+ project = {Transformers},
+ urllrde = {200505-SUD-esdf},
+ abstract = {By means of its annotations, the Syntax Definition
+ Formalism (SDF) seems to be extensible: the user is tempted
+ to tailor the grammar syntax by adding new annotation
+ kinds. Unfortunately the standard SDF crunching tools from
+ Stratego/XT do not support the extension of SDF, and the
+ user has to develop the whole set of tools for her
+ home-grown extension(s). We present the SDF tool set that
+ provides ``weak'' genericity with respect to the grammar
+ grammar: support for arbitrary SDF annotations. We would
+ like to contribute it to Stratego/XT since its components
+ subsume their stock peers. Finally, we present a set of
+ four extensions we find useful.}
+}
+
+@InProceedings{ regisgianas.03.poosc,
+ author = {Yann R\'egis-Gianas and Rapha\"el Poss},
+ title = {On orthogonal specialization in {C++}: dealing with
+ efficiency and algebraic abstraction in {V}aucanson},
+ booktitle = {Proceedings of the Parallel/High-performance
+ Object-Oriented Scientific Computing (POOSC; in conjunction
+ with ECOOP)},
+ year = 2003,
+ number = {FZJ-ZAM-IB-2003-09},
+ pages = {71--82},
+ editor = {J\"org Striegnitz and Kei Davis},
+ series = {John von Neumann Institute for Computing (NIC)},
+ address = {Darmstadt, Germany},
+ month = jul,
+ project = {Vaucanson},
+ urllrde = {200307-Poosc},
+ abstract = {Vaucanson is a C++ generic library for weighted finite
+ state machine manipulation. For the sake of generality, FSM
+ are defined using algebraic structures such as alphabet
+ (for the letters), free monoid (for the words), semiring
+ (for the weights) and series (mapping from words to
+ weights). As usual, what is at stake is to maintain
+ efficiency while providing a high-level layer for the
+ writing of generic algorithms. Yet, one of the
+ particularities of FSM manipulation is the need for
+ fine-grained specialization power on an object which is both an
+ algebraic concept and an intensive computing machine.}
+}
+
+@InProceedings{ ricou.07.adass,
+ author = {Olivier Ricou and Anthony Baillard and Emmanuel Bertin and
+ Frederic Magnard and Chiara Marmo and Yannick Mellier},
+ title = {Web services at {TERAPIX}},
+ booktitle = {Proceedings of the XVII conference on Astronomical Data
+ Analysis Software \& Systems (ADASS)},
+ month = sep,
+ year = 2007,
+ urllrde = {200709-ADASS},
+ abstract = {We present an implementation of V.O.-compliant web
+ services built around software tools developed at the
+ TERAPIX centre. These services make it possible to operate,
+ from a remote site, several pipeline tasks dedicated to
+ astronomical data processing on the TERAPIX cluster,
+ including the latest EFIGI morphological analysis tool.}
+}
+
+@InProceedings{ ricou.07.eceg,
+ author = {Olivier Ricou},
+ title = {10 years of confrontation between {French} {Internet}
+ users and their successive governments},
+ booktitle = {Proceedings of the 7th European Conference on e-Government
+ (ECEG)},
+ month = jun,
+ year = 2007,
+ urllrde = {200706-ECEG},
+ abstract = {This paper is a testimony on the relations between the
+ Internet users and their governments in France during the
+ last decade. It shows the complexity of communication
+ between two worlds that are strangers to each other. Since
+ most of the confrontation occurred over law proposals, it
+ analyses their impact on Internet users and focuses on two
+ examples. These examples show the failure of the Internet
+ as a political medium. French politicians do not seem to
+ want an active participation of the citizens in
+ decision-making processes. In order to end this paper on an
+ optimistic note, the last section enumerates the
+ achievements of e-government, which contributed to preparing a better
+ democracy by increasing transparency, accountability, and
+ education. This might push citizens to ask for more. }
+}
+
+@InProceedings{ ricou.08.eceg,
+ author = {Olivier Ricou},
+ title = {A Survey of {French} Local e-Democracy},
+ booktitle = {Proceedings of the 8th European Conference on e-Government
+ (ECEG)},
+ month = jul,
+ year = 2008,
+ urllrde = {200807-ECEG},
+ abstract = {Since the end of the last century, the Internet has shown
+ that it is a different media, a media of citizen
+ journalists. This paper surveys e-democratic tools used at
+ the local level in France in order to see how the Internet
+ can change our democracy and people's participation. It
+ describes the official tools provided by municipalities and
+ administrations as well as citizens' tools, like blogs,
+ which become more and more important in today's democratic
+ debate. It analyses how they help achieve more transparency,
+ accountability and participation, which might lead to the
+ definition of new democratic rules.}
+}
+
+@TechReport{ vaucanson.04.techrep,
+ author = {The \textsc{Vaucanson} group},
+ title = {Proposal: an {XML} representation for automata},
+ institution = {EPITA Research and Development Laboratory (LRDE)},
+ year = 2004,
+ number = 0414,
+ address = {France},
+ month = nov,
+ url = {http://www.lrde.epita.fr/cgi-bin/twiki/view/Publications/200414-TR},
+ project = {Vaucanson},
+ urllrde = {200414-TR}
+}
+
+@InProceedings{ verna.00.vsmm,
+ author = {Didier Verna},
+ title = {Action recognition: how intelligent virtual environments
+ can ease human-machine interaction},
+ booktitle = {Proceedings of the 6th International Conference on Virtual
+ Systems and MultiMedia (VSMM)---Intelligent Environments
+ Workshop},
+ pages = {703--713},
+ year = 2000,
+ address = {Gifu, Japan},
+ month = oct,
+ publisher = {IOS Press, USA},
+ isbn = {1-58603-108-2},
+ project = {Urbi},
+ urllrde = {200010-Vsmm-2},
+ abstract = {This paper describes research that has been conducted in
+ the field of cognitive assistance to human-machine
+ interaction in virtual environments. The idea is to design
+ a system which, bearing in mind the actions performed by
+ the operator at present and the current state of the
+ environment, attempts to determine the global operation
+ that the user is in the process of executing, and
+ eventually takes control of the same process in order to
+ complete it automatically. This idea implies the design
+ of an action recognition mechanism based on a specific
+ knowledge representation model. This mechanism is
+ implemented in a computer demonstrator, known as the TOASt
+ system, which is also presented.}
+}
+
+@InProceedings{ verna.01.sci,
+ author = {Didier Verna},
+ title = {Virtual reality and tele-operation: a common framework},
+ booktitle = {Proceedings of the 5th World Multi-Conference on
+ Systemics, Cybernetics and Informatics (SCI)---Emergent
+ Computing and Virtual Engineering},
+ year = 2001,
+ volume = 3,
+ pages = {499--504},
+ address = {Orlando, Florida, USA},
+ month = jul,
+ editor = {N. Callas and S. Esquivel and J. Burge},
+ project = {Urbi},
+ urllrde = {200107-Sci},
+ abstract = {This paper proposes an overview of a study that
+ conceptually unifies the fields of virtual reality and
+ tele-operation, by analyzing the notion of ``assistance''
+ to the operator of a virtual reality or tele-operation
+ system. This analysis demonstrates that cases of assistance
+ that are usually considered to belong to virtual reality
+ are not conceptually different from what has been done in
+ tele-operation since long before virtual reality appeared.
+ With this common framework for virtual reality and
+ tele-operation, we hope to provide a theoretical
+ formalization of many ideas acquired empirically, and hence
+ a basis onto which further discussion could be undertaken
+ in a constructive manner.}
+}
+
+@InProceedings{ verna.06.ecoop,
+ author = {Didier Verna},
+ title = {Beating {C} in Scientific Computing Applications},
+ booktitle = {Third European Lisp Workshop at ECOOP},
+ year = 2006,
+ address = {Nantes, France},
+ month = jul,
+ note = {Best paper award.},
+ project = {Software},
+ urllrde = {200607-ECOOP},
+ abstract = {This paper presents ongoing research on the behavior
+ and performance of Lisp with respect to C in the context of
+ scientific numerical computing. Several simple image
+ processing algorithms are used to evaluate the performance
+ of pixel access and arithmetic operations in both
+ languages. We demonstrate that the behavior of equivalent
+ Lisp and C code is similar with respect to the choice of
+ data structures and types, and also to external parameters
+ such as hardware optimization. We further demonstrate that
+ properly typed and optimized Lisp code runs as fast as the
+ equivalent C code, or even faster in some cases.}
+}
+
+@Article{ verna.06.ijcs,
+ author = {Didier Verna},
+ title = {How to make Lisp go faster than {C}},
+ journal = {IAENG International Journal of Computer Science},
+ year = 2006,
+ volume = 32,
+ number = 4,
+ month = dec,
+ issn = {1819-656X},
+ project = {Software},
+ urllrde = {200606-IMECS},
+ abstract = {Contrary to popular belief, Lisp code can be very
+ efficient today: it can run as fast as equivalent C code or
+ even faster in some cases. In this paper, we explain how to
+ tune Lisp code for performance by introducing the proper
+ type declarations, using the appropriate data structures
+ and compiler information. We also explain how efficiency is
+ achieved by the compilers. These techniques are applied to
+ simple image processing algorithms in order to demonstrate
+ the announced performance on pixel access and arithmetic
+ operations in both languages.}
+}
+
+@InProceedings{ verna.06.imecs,
+ author = {Didier Verna},
+ title = {How to make Lisp go faster than {C}},
+ booktitle = {Proceedings of the International MultiConference of
+ Engineers and Computer Scientists},
+ year = 2006,
+ address = {Hong Kong},
+ month = jun,
+ organization = {International Association of Engineers},
+ isbn = {988-98671-3-3},
+ project = {Software},
+ urllrde = {200606-IMECS},
+ abstract = {Contrary to popular belief, Lisp code can be very
+ efficient today: it can run as fast as equivalent C code or
+ even faster in some cases. In this paper, we explain how to
+ tune Lisp code for performance by introducing the proper
+ type declarations, using the appropriate data structures
+ and compiler information. We also explain how efficiency is
+ achieved by the compilers. These techniques are applied to
+ simple image processing algorithms in order to demonstrate
+ the announced performance on pixel access and arithmetic
+ operations in both languages.}
+}
+
+@Article{ verna.06.practex,
+ author = {Didier Verna},
+ journal = {The Prac\TeX{} Journal},
+ year = 2006,
+ volume = 2006,
+ number = 3,
+ month = aug,
+ project = {Software},
+ urllrde = {200608-PracTeX},
+ abstract = {This paper presents {\CurVe}, a curriculum vitae class for
+ \LaTeX2e, in a progressive approach going from a first
+ contact with the class, through concrete examples of
+ customization, and some aspects of advanced usage.}
+}
+
+@InProceedings{ verna.07.imecs,
+ author = {Didier Verna},
+ title = {{CLOS} solutions to binary methods},
+ booktitle = {Proceedings of the International MultiConference of
+ Engineers and Computer Scientists},
+ year = 2007,
+ address = {Hong Kong},
+ month = mar,
+ organization = {International Association of Engineers},
+ project = {Software},
+ urllrde = {200703-IMECS},
+ abstract = {Implementing binary methods in traditional object-oriented
+ languages is difficult: numerous problems arise, such as
+ typing (covariance vs. contra-variance of the arguments),
+ polymorphism on multiple arguments (lack of multi-methods)
+ etc. The purpose of this paper is to demonstrate how those
+ problems are either solved, or nonexistent in the Common
+ Lisp Object System (CLOS). Several solutions for different
+ levels of binary methods support in CLOS are proposed. They
+ mainly consist in re-programming a binary-method-specific
+ object system through the CLOS meta-object protocol.}
+}
+
+@InProceedings{ verna.08.els,
+ author = {Didier Verna},
+ title = {Binary Methods Programming: the {CLOS} Perspective},
+ booktitle = {Proceedings of the First European Lisp Symposium},
+ pages = {91--105},
+ year = 2008,
+ address = {Bordeaux, France},
+ month = may,
+ project = {Software},
+ urllrde = {200805-ELS},
+ abstract = {Implementing binary methods in traditional object-oriented
+ languages is difficult: numerous problems arise regarding
+ the relationship between types and classes in the context
+ of inheritance, or the need for privileged access to the
+ internal representation of objects. Most of these problems
+ occur in the context of statically typed languages that
+ lack multi-methods (polymorphism on multiple arguments).
+ The purpose of this paper is twofold: first, we show why
+ some of these problems are either non-issues, or easily
+ solved in Common Lisp. Then, we demonstrate how the Common
+ Lisp Object System (CLOS) allows us not only to implement
+ binary methods in a straightforward way, but also to
+ support the concept directly, and even enforce it at
+ different levels (usage and implementation).}
+}
+
+@Article{ verna.08.jucs,
+ author = {Didier Verna},
+ title = {Binary Methods Programming: the {CLOS} Perspective
+ (extended version)},
+ journal = {Journal of Universal Computer Science},
+ year = 2008,
+ volume = 14,
+ number = 20,
+ pages = {3389--3411},
+ project = {Software},
+ urllrde = {200811-JUCS},
+ abstract = {Implementing binary methods in traditional object-oriented
+ languages is difficult: numerous problems arise regarding
+ the relationship between types and classes in the context
+ of inheritance, or the need for privileged access to the
+ internal representation of objects. Most of these problems
+ occur in the context of statically typed languages that
+ lack multi-methods (polymorphism on multiple arguments).
+ The purpose of this paper is twofold: first, we show why
+ some of these problems are either non-issues, or easily
+ solved in Common Lisp. Then, we demonstrate how the Common
+ Lisp Object System (CLOS) allows us not only to implement
+ binary methods in a straightforward way, but also to
+ support the concept directly, and even enforce it at
+ different levels (usage and implementation).}
+}
+
+@InProceedings{ verna.08.lncs,
+ author = {Didier Verna and Charlotte Herzeel and Christophe Rhodes
+ and Hans H\"ubner},
+ title = {Report on the 5th Workshop {ELW} at {ECOOP 2008}},
+ booktitle = {Object-Oriented Technology. ECOOP 2008 Workshop Reader.},
+ pages = {1--6},
+ year = 2008,
+ editor = {Patrick Eugster},
+ volume = 5475,
+ series = {Lecture Notes in Computer Science},
+ month = jul,
+ publisher = {Springer}
+}
+
+@InProceedings{ verna.09.accu,
+ author = {Didier Verna},
+ title = {Revisiting the Visitor: the Just Do It Pattern},
+ booktitle = {Proceedings of the ACCU Conference 2009},
+ year = 2009,
+ address = {Oxford},
+ project = {Software},
+ urllrde = {200904-ACCU},
+ note = {Accepted}
+}
+
+@InProceedings{ verna.09.ilc,
+ author = {Didier Verna},
+ title = {{CLOS} Efficiency: Instantiation},
+ booktitle = {Proceedings of the International Lisp Conference},
+ year = 2009,
+ month = mar,
+ pages = {76--90},
+ organization = {Association of Lisp Users},
+ urllrde = {200903-ILC},
+ abstract = {This article reports the results of ongoing
+ experimental research on the behavior and performance of
+ CLOS, the Common Lisp Object System. Our purpose is to
+ evaluate the behavior and performance of the 3 most
+ important characteristics of any dynamic Object Oriented
+ system: class instantiation, slot access and dynamic
+ dispatch. This paper describes the results of our
+ experiments on instantiation. We evaluate the efficiency of
+ the instantiation process in both C++ and Lisp under a
+ combination of parameters such as slot types or classes
+ hierarchy. We show that in a non-optimized configuration
+ where safety is given priority on speed, the behavior of
+ C++ and Lisp instantiation can be quite different, which is
+ also the case amongst different Lisp compilers. On the
+ other hand, we demonstrate that when compilation is tuned
+ for speed, instantiation in Lisp becomes faster than in
+ C++.}
+}
+
+@InProceedings{ verna.10.els,
+ author = {Didier Verna},
+ title = {{CLoX}: {C}ommon {L}isp objects for {XEmacs}},
+ booktitle = {Proceedings of the 3rd European Lisp Symposium},
+ year = 2010,
+ address = {Lisbon, Portugal},
+ month = may,
+ project = {Software},
+ urllrde = {201005-ELS},
+ abstract = {CLoX is an ongoing attempt to provide a full Emacs Lisp
+ implementation of the Common Lisp Object System, including
+ its underlying meta-object protocol, for XEmacs. This paper
+ describes the early development stages of this project.
+ CLoX currently consists in a port of Closette to Emacs
+ Lisp, with some additional features, most notably, a deeper
+ integration between types and classes and a comprehensive
+ test suite. All these aspects are described in the paper,
+ and we also provide a feature comparison with an
+ alternative project called Eieio.}
+}
+
+@Article{ verna.10.jucs,
+ author = {Didier Verna},
+ title = {Revisiting the Visitor: the Just Do It Pattern},
+ journal = {Journal of Universal Computer Science},
+ year = 2010,
+ volume = 16,
+ pages = {246--271},
+ project = {Software},
+ urllrde = {201004-JUCS},
+ abstract = {While software design patterns are a generally useful
+ concept, they are often (and mistakenly) seen as ready-made
+ universal recipes for solving common problems. In a way,
+ the danger is that programmers stop thinking about their
+ actual problem, and start looking for pre-cooked solutions
+ in some design pattern book instead. What people usually
+ forget about design patterns is that the underlying
+ programming language plays a major role in the exact shape
+ any given pattern will have on the surface. The purpose
+ of this paper is twofold: we show why design pattern
+ expression is intimately linked to the expressiveness of
+ the programming language in use, and we also demonstrate
+ how a blind application of them can in fact lead to very
+ poorly designed code.}
+}
+
+@InProceedings{ verna.10.tug,
+ author = {Didier Verna},
+ title = {Classes, Styles, Conflicts: the Biological Realm of
+ {\LaTeX}},
+ booktitle = {TUGboat},
+ pages = {162--172},
+ year = 2010,
+ editor = {Barbara Beeton and Karl Berry},
+ volume = 31,
+ number = 2,
+ project = {Software},
+ urllrde = {201006-TUG},
+ abstract = {The \LaTeX{} world is composed of thousands of software
+ components, most notably classes and styles. Classes and
+ styles are born, evolve or die, interact with each other,
+ compete or cooperate, very much as living organisms do at
+ the cellular level. This paper attempts to draw an extended
+ analogy between the \LaTeX{} biotope and cellular biology.
+ By considering \LaTeX{} documents as living organisms and
+ styles as viruses that infect them, we are able to exhibit
+ a set of behavioral patterns common to both worlds. We
+ analyze infection methods, types and cures, and we show how
+ \LaTeX{} or cellular organisms are able to survive in a
+ world of perpetual war.}
+}
+
+@InProceedings{ verna.11.onward,
+ author = {Didier Verna},
+ title = {Biological Realms in Computer Science: the Way You Don't
+ (Want To) Think About Them},
+ booktitle = {Onward! 2011},
+ year = {2011},
+ note = {Accepted},
+ abstract = {In biology, evolution is usually seen as a tinkering
+ process, different from what an engineer does when
+ planning the development of a system. Recently, studies
+ have shown that even in biology, there is an element of
+ good engineering. As computer scientists, we have much
+ more difficulty admitting that there is also a great deal
+ of tinkering in what we do, and that our software systems
+ behave more and more like biological realms every day. This
+ essay relates my personal experience of this discovery.}
+}
+
+@InProceedings{ verna.11.tug,
+ author = {Didier Verna},
+ title = {Towards {\LaTeX} Coding Standards},
+ booktitle = {TUGboat},
+ OPTpages = {00--00},
+ year = 2011,
+ OPTeditor = {Barbara Beeton and Karl Berry},
+ OPTvolume = 00,
+ OPTnumber = 0,
+ project = {Software},
+ OPTurllrde = {201100-TUG},
+ note = {Accepted},
+ abstract = {Because \LaTeX{} is only a macro-expansion system, the
+ language does not impose any kind of good software
+ engineering practice, program structure or coding style.
+ Maybe because collaboration is not so widespread in the
+ \LaTeX{} world, the idea of \LaTeX{} coding standards is
+ not as pressing as with other programming languages. Over
+ the years, a permanent flow of personal development
+ experiences has contributed to shaping our own taste in
+ terms of coding style. In this paper, we report on all
+ these experiences and describe what we think are good
+ programming practices.}
+}
+
+@InProceedings{ xue.03.icip,
+ author = {Heru Xue and {\relax Th}ierry G\'eraud and Alexandre
+ Duret-Lutz},
+ title = {Multi-band segmentation using morphological clustering and
+ fusion: application to color image segmentation},
+ booktitle = {Proceedings of the IEEE International Conference on Image
+ Processing (ICIP)},
+ year = 2003,
+ pages = {353--356},
+ volume = 1,
+ address = {Barcelona, Spain},
+ month = sep,
+ project = {Image},
+ urllrde = {200309-Icip},
+ abstract = {In this paper we propose a novel approach for color image
+ segmentation. Our approach is based on segmentation of
+ subsets of bands using mathematical morphology followed by
+ the fusion of the resulting segmentation channels. For
+ color images the band subsets are chosen as RG, RB and GB
+ pairs, whose 2D histograms are processed as projections of
+ a 3D histogram. The segmentations in 2D color spaces are
+ obtained using the watershed algorithm. These 2D
+ segmentations are then combined to obtain a final result
+ using a region split-and-merge process. The CIE L*a*b* color
+ space is used to measure the color distance. Our approach
+ results in improved performance and can be generalized for
+ multi-band segmentation of images such as multi-spectral
+ satellite images.}
+}
+
+@InProceedings{ yoruk.04.eusipco,
+ author = {Erdem Yoruk and Ender Konukoglu and Bulent Sankur and
+ J\'er\^ome Darbon},
+ title = {Person authentication based on hand shape},
+ booktitle = {Proceedings of 12th European Signal Processing Conference
+ (EUSIPCO)},
+ year = 2004,
+ address = {Vienna, Austria},
+ month = sep,
+ project = {Image},
+ urllrde = {200409-EUSIPCO},
+ abstract = {The problem of person identification based on hand
+ images has been addressed. The system is based on the
+ images of the right hands of the subjects, captured by a
+ flatbed scanner in an unconstrained pose. In a
+ preprocessing stage of the algorithm, the silhouettes of
+ hand images are registered to a fixed pose, which involves
+ both rotation and translation of the hand and, separately,
+ of the individual fingers. Independent component features
+ of the hand silhouette images are used for recognition. The
+ classification performance is found to be very satisfactory
+ and it is shown that, at least for groups of one hundred
+ subjects, hand-based recognition is a viable secure access
+ control scheme.}
+}
+
+@Article{ yoruk.06.itip,
+ author = {Erdem Y\"or\"uk and Ender Konukoglu and B\"ulent Sankur
+ and J\'er\^ome Darbon},
+ title = {Shape-based hand recognition},
+ journal = {IEEE Transactions on Image Processing},
+ year = 2006,
+ volume = 15,
+ number = 7,
+ pages = {1803--1815},
+ month = jul,
+ project = {Image},
+ urllrde = {2006XX-ITIP},
+ abstract = {The problem of person recognition and verification based
+ on hand images has been addressed. The system is
+ based on the images of the right hands of the subjects,
+ captured by a flatbed scanner in an unconstrained pose at
+ 45 dpi. In a preprocessing stage of the algorithm, the
+ silhouettes of hand images are registered to a fixed pose,
+ which involves both rotation and translation of the hand
+ and, separately, of the individual fingers. Two feature
+ sets have been comparatively assessed, Hausdorff distance
+ of the hand contours and independent component features of
+ the hand silhouette images. Both the classification and the
+ verification performances are found to be very satisfactory
+ as it is shown that, at least for groups of about five
+ hundred subjects, hand-based recognition is a viable secure
+ access control scheme.}
+}
+
+%% Local Variables:
+%% fill-column: 76
+%% ispell-local-dictionary: "american"
+%% End:
diff --git a/doc/mainpage.hh b/doc/mainpage.dox
similarity index 80%
rename from doc/mainpage.hh
rename to doc/mainpage.dox
index 116c69a..8f49ca3 100644
--- a/doc/mainpage.hh
+++ b/doc/mainpage.dox
@@ -13,8 +13,8 @@
<tr>
<td>
\li What is Olena ?
- \li \ref bibliography
- \li Contributors
+ \li <a class="el"
href="http://www.lrde.epita.fr/cgi-bin/twiki/view/Olena/Publications">Bibliography</a>
+ \li <a class="el" href="a00028.html">Contributors</a>
</td>
<td>
\li \ref moduleslist
@@ -23,7 +23,7 @@
\li Swilena
</td>
<td>
- \li Online Demos
+ \li <a class="el"
href="http://www.lrde.epita.fr/cgi-bin/twiki/view/Olena/Demos">Online
Demos</a>
\li Papers Related Programs
\li Olena Powered Programs
</td>
diff --git a/doc/modules_list.hh b/doc/modules_list.dox
similarity index 100%
rename from doc/modules_list.hh
rename to doc/modules_list.dox
diff --git a/doc/olena.qhcp b/doc/olena.qhcp
deleted file mode 100644
index 595d34e..0000000
--- a/doc/olena.qhcp
+++ /dev/null
@@ -1,12 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<QHelpCollectionProject version="1.0">
- <assistant>
- <startPage>qthelp://fr.epita.lrde.olena/milena-2.0/index.html</startPage>
- </assistant>
- <docFiles>
- <register>
- <file>../milena/doc/user-refman/html/milena.qch</file>
- <file>../scribo/doc/user-refman/html/scribo.qch</file>
- </register>
- </docFiles>
-</QHelpCollectionProject>
\ No newline at end of file
diff --git a/scribo/doc/Doxyfile.in b/scribo/doc/Doxyfile.in
index d279d76..7ad639f 100644
--- a/scribo/doc/Doxyfile.in
+++ b/scribo/doc/Doxyfile.in
@@ -26,7 +26,7 @@ PROJECT_LOGO = @top_srcdir@/doc/logo.jpg
OUTPUT_DIRECTORY = @builddir@/user-refman.tmp
CREATE_SUBDIRS = YES
OUTPUT_LANGUAGE = English
-BRIEF_MEMBER_DESC = YES
+BRIEF_MEMBER_DESC = NO
REPEAT_BRIEF = YES
ABBREVIATE_BRIEF =
ALWAYS_DETAILED_SEC = YES
@@ -47,6 +47,8 @@ BUILTIN_STL_SUPPORT = YES
IDL_PROPERTY_SUPPORT = NO
DISTRIBUTE_GROUP_DOC = NO
SUBGROUPING = YES
+INLINE_GROUPED_CLASSES = NO
+SORT_GROUP_NAMES = YES
#---------------------------------------------------------------------------
# Build related configuration options
#---------------------------------------------------------------------------
@@ -95,7 +97,8 @@ INPUT = @top_srcdir@/scribo
FILE_PATTERNS = *.cc \
*.hh \
*.hxx \
- *.hcc
+ *.hcc \
+ *.dox
RECURSIVE = YES
EXCLUDE = @top_srcdir@/scribo/demo \
@top_srcdir@/scribo/sandbox \
@@ -126,8 +129,8 @@ INPUT_ENCODING = ISO-8859-1
SOURCE_BROWSER = YES
INLINE_SOURCES = NO
STRIP_CODE_COMMENTS = YES
-REFERENCED_BY_RELATION = YES
-REFERENCES_RELATION = YES
+REFERENCED_BY_RELATION = NO
+REFERENCES_RELATION = NO
REFERENCES_LINK_SOURCE = NO
USE_HTAGS = NO
VERBATIM_HEADERS = YES
@@ -329,7 +332,7 @@ SKIP_FUNCTION_MACROS = YES
TAGFILES =
@builddir@/../../milena/doc/milena.tag=../../../../milena/doc/user-refman/html
GENERATE_TAGFILE = scribo.tag
ALLEXTERNALS = NO
-EXTERNAL_GROUPS = YES
+EXTERNAL_GROUPS = NO
PERL_PATH = /usr/bin/perl
#---------------------------------------------------------------------------
# Configuration options related to the dot tool
diff --git a/scribo/doc/groups.dox b/scribo/doc/groups.dox
new file mode 100644
index 0000000..73d842e
--- /dev/null
+++ b/scribo/doc/groups.dox
@@ -0,0 +1,19 @@
+/*! \defgroup grpstruct Data structures
+ *
+ * \brief Scribo high-level data structures.
+ *
+ */
+
+
+/*! \defgroup grproutine Routines
+ *
+ * \brief All routines/algorithms provided in Scribo.
+ *
+ */
+
+
+/*! \defgroup grpalgobin Binarization
+ *
+ * \brief Binarization algorithms.
+ *
+ */
diff --git a/scribo/scribo/core/component_set.hh b/scribo/scribo/core/component_set.hh
index ded64ae..8c3e83f 100644
--- a/scribo/scribo/core/component_set.hh
+++ b/scribo/scribo/core/component_set.hh
@@ -119,6 +119,10 @@ namespace scribo
} // end of namespace scribo::internal
+ /// \brief Represents all the components in a document image.
+ ///
+ /// \ingroup grpstruct
+ //
template <typename L>
class component_set : public Serializable<component_set<L> >
{
diff --git a/scribo/scribo/core/document.hh b/scribo/scribo/core/document.hh
index cc885e4..5bbd226 100644
--- a/scribo/scribo/core/document.hh
+++ b/scribo/scribo/core/document.hh
@@ -79,9 +79,12 @@ namespace scribo
} // end of namespace scribo::internal
-
+ /// \brief Represents document data and structure.
+ ///
+ /// \ingroup grpstruct
+ //
template <typename L>
- struct document : public Serializable<document<L> >
+ class document : public Serializable<document<L> >
{
typedef internal::document_data<L> data_t;
diff --git a/scribo/scribo/core/line_links.hh b/scribo/scribo/core/line_links.hh
index b7b438c..4f807e4 100644
--- a/scribo/scribo/core/line_links.hh
+++ b/scribo/scribo/core/line_links.hh
@@ -67,7 +67,9 @@ namespace scribo
- /// \brief Line group representation.
+ /// \brief Line links representation.
+ ///
+ /// \ingroup grpstruct
//
template <typename L>
class line_links : public Serializable<line_links<L> >
diff --git a/scribo/scribo/core/line_set.hh b/scribo/scribo/core/line_set.hh
index db987e6..4880a77 100644
--- a/scribo/scribo/core/line_set.hh
+++ b/scribo/scribo/core/line_set.hh
@@ -1,5 +1,5 @@
-// Copyright (C) 2009, 2010 EPITA Research and Development Laboratory
-// (LRDE)
+// Copyright (C) 2009, 2010, 2011 EPITA Research and Development
+// Laboratory (LRDE)
//
// This file is part of Olena.
//
@@ -86,6 +86,9 @@ namespace scribo
Line ids start from 1.
+
+ \ingroup grpstruct
+
*/
template <typename L>
class line_set
diff --git a/scribo/scribo/core/object_groups.hh b/scribo/scribo/core/object_groups.hh
index 46447d2..53ba7f9 100644
--- a/scribo/scribo/core/object_groups.hh
+++ b/scribo/scribo/core/object_groups.hh
@@ -88,6 +88,8 @@ namespace scribo
/// \brief Object group representation.
+ ///
+ /// \ingroup grpstruct
//
template <typename L>
class object_groups : public Serializable<object_groups<L> >
diff --git a/scribo/scribo/core/object_links.hh b/scribo/scribo/core/object_links.hh
index 37fd299..a2543a7 100644
--- a/scribo/scribo/core/object_links.hh
+++ b/scribo/scribo/core/object_links.hh
@@ -69,7 +69,9 @@ namespace scribo
- /// \brief Object group representation.
+ /// \brief Object links representation.
+ ///
+ /// \ingroup grpstruct
//
template <typename L>
class object_links : public Serializable<object_links<L> >
diff --git a/scribo/scribo/core/paragraph_set.hh b/scribo/scribo/core/paragraph_set.hh
index c21359d..69194be 100644
--- a/scribo/scribo/core/paragraph_set.hh
+++ b/scribo/scribo/core/paragraph_set.hh
@@ -65,6 +65,7 @@ namespace scribo
Paragraph ids start from 1.
+ \ingroup grpstruct
*/
template <typename L>
class paragraph_set : public Serializable<paragraph_set<L> >
--
1.7.2.5