* bootstrap (require): Take a flag as an optional third argument.
Use it to check for HeVeA 1.10.
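For illustration, a minimal sketch of how the extended require is meant
to be called (the doxygen line only shows the default behavior; this
patch adds the hevea line):

  require doxygen 1.5.6         # queried with the default `--version'
  require hevea 1.10 -version   # HeVeA presumably only understands `-version'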
---
ChangeLog | 7 +++++++
bootstrap | 13 +++++++++----
2 files changed, 16 insertions(+), 4 deletions(-)
diff --git a/ChangeLog b/ChangeLog
index 53ba20c..55690d0 100644
--- a/ChangeLog
+++ b/ChangeLog
@@ -1,3 +1,10 @@
+2010-03-15 Roland Levillain <roland@lrde.epita.fr>
+
+ Have bootstrap check for HeVeA 1.10.
+
+ * bootstrap (require): Take a flag as an optional third argument.
+ Use it to check for HeVeA 1.10.
+
2010-03-12 Roland Levillain <roland@lrde.epita.fr>
Add texi2dvi as well as a Make helper to use it.
diff --git a/bootstrap b/bootstrap
index 62cc724..2bdd761 100755
--- a/bootstrap
+++ b/bootstrap
@@ -78,14 +78,17 @@ version_compare ()
esac
}
-# require TOOL REQUIREMENT
-# ------------------------
+# require TOOL REQUIREMENT [OPTION]
+# ---------------------------------
# Test that TOOL exists, and its version is at least REQUIREMENT.
+# If given, use OPTION to query TOOL; use `--version' otherwise.
require ()
{
- local version=$($1 --version | sed -n 's/[^0-9.]*\([0-9][0-9.]*\).*/\1/p;q')
+ local option
+ if test $# -eq 3; then option=$3; else option=--version; fi
+ local version=$($1 $option | sed -n 's/[^0-9.]*\([0-9][0-9.]*\).*/\1/p;q')
test x"$version" != x ||
- fatal "$1 is required"
+ fatal "$1 (version $2 or better) is required"
case $(version_compare "$2" "$version") in
'>') fatal "$1 $2 or better is required: this is $1 $version";;
esac
@@ -126,6 +129,8 @@ export LIBTOOLIZE=$libtoolize
require $libtoolize 1.5.22
require doxygen 1.5.6
+# FIXME: This should no longer be required when TeX4HT is used.
+require hevea 1.10 -version
# Tell what's going on.
set -x
--
1.5.6.5
* doc/tools/todoxygen.sh: Take an HTML file as input instead of a
LaTeX one.
* doc/Makefile.am (DOC_PACKAGES): New variable.
(TECHNICAL_HTML, TUTORIAL_HTML, REF_GUIDE_HTML):
New variables.
($(TECHNICAL_HTML), $(TUTORIAL_HTML), $(REF_GUIDE_HTML)):
New targets.
(dist_doc_DATA): Add $(TECHNICAL_HTML), $(TUTORIAL_HTML) and
$(REF_GUIDE_HTML).
($(TECHNICAL_HH)): Depend on and process $(TECHNICAL_HTML) instead
of $(technical_dir)/technical.tex.
($(TUTORIAL_HH)): Depend on and process $(TUTORIAL_HTML) instead
of $(tutorial_dir)/tutorial.tex.
($(REF_GUIDE_HH)): Depend on and process $(REF_GUIDE_HTML) instead
of $(ref_guide_dir)/ref_guide.tex.
(CLEANFILES): Remove now useless patterns.
(clean-local): Likewise.
Adjust paths.
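For reference, the new rules boil down to roughly this two-step
pipeline for the technical manual (run from milena/doc; the trailing
`.' stands in for $(doc_dir)):

  # LaTeX -> HTML with HeVeA, loading the project style files.
  hevea -O -fix doxyfuns.sty milena.sty technical/technical.tex -o technical/technical.html
  # HTML -> Doxygen-digestible .hh with the reworked helper.
  tools/todoxygen.sh technical/technical.html technical .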
---
milena/ChangeLog | 23 ++++++++++
milena/doc/Makefile.am | 94 +++++++++++++++++++----------------------
milena/doc/tools/todoxygen.sh | 46 ++++++++++++--------
3 files changed, 94 insertions(+), 69 deletions(-)
diff --git a/milena/ChangeLog b/milena/ChangeLog
index 3ac7958..80ea76d 100644
--- a/milena/ChangeLog
+++ b/milena/ChangeLog
@@ -1,5 +1,28 @@
2010-03-12 Roland Levillain <roland@lrde.epita.fr>
+ Split LaTeX-to-HTML rules to clear things up.
+
+ * doc/tools/todoxygen.sh: Take an HTML file as input instead of a
+ LaTeX one.
+ * doc/Makefile.am (DOC_PACKAGES): New variable.
+ (TECHNICAL_HTML, TUTORIAL_HTML, REF_GUIDE_HTML):
+ New variables.
+ ($(TECHNICAL_HTML), $(TUTORIAL_HTML), $(REF_GUIDE_HTML)):
+ New targets.
+ (dist_doc_DATA): Add $(TECHNICAL_HTML), $(TUTORIAL_HTML) and
+ $(REF_GUIDE_HTML).
+ ($(TECHNICAL_HH)): Depend on and process $(TECHNICAL_HTML) instead
+ of $(technical_dir)/technical.tex.
+ ($(TUTORIAL_HH)): Depend on and process $(TUTORIAL_HTML) instead
+ of $(tutorial_dir)/tutorial.tex.
+ ($(REF_GUIDE_HH)): Depend on and process $(REF_GUIDE_HTML) instead
+ of $(ref_guide_dir)/ref_guide.tex.
+ (CLEANFILES): Remove now useless patterns.
+ (clean-local): Likewise.
+ Adjust paths.
+
+2010-03-12 Roland Levillain <roland@lrde.epita.fr>
+
List non generated documentation outputs separately.
* doc/Makefile.am (OUTPUTS, EXAMPLES):
diff --git a/milena/doc/Makefile.am b/milena/doc/Makefile.am
index 98e3332..0d74098 100644
--- a/milena/doc/Makefile.am
+++ b/milena/doc/Makefile.am
@@ -90,6 +90,10 @@ TEXI2DVI_FLAGS += \
-I $(DOC_SRCDIR) -I $(OUTPUTS_SRCDIR) -I $(SPLIT_OUTPUTS_SRCDIR) \
-I $(IMG_SRCDIR) -I $(SPLIT_EXAMPLES_SRCDIR)
+# Additional files for HeVeA compilations.
+DOC_PACKAGES = $(srcdir)/doxyfuns.sty $(srcdir)/milena.sty
+
+
## ----------------------- ##
## Doxygen documentation. ##
## ----------------------- ##
@@ -310,6 +314,16 @@ technical_dependencies = $(doc_dir)/figures.stamp
# product is a directory, also refresh a timestamp (in the source
# dir).
+## FIXME: technical/technical.hh does not seem to be used (by the
+## Doxygen documentation); remove its generation? Careful,
+## technical/technical.html *is* probably required, though.
+
+TECHNICAL_HTML = $(technical_dir)/technical.html
+$(TECHNICAL_HTML): $(technical_dir)/technical.tex $(technical_dependencies)
+ hevea -O -fix $(DOC_PACKAGES) $< -o $@
+
+dist_doc_DATA += $(TECHNICAL_HTML)
+
# Intermediate product for the various doc targets of the parent
# directory.
#
@@ -317,8 +331,10 @@ technical_dependencies = $(doc_dir)/figures.stamp
# extension, since it is later parsed by Doxygen, which complains
# about `.html' files.
TECHNICAL_HH = $(technical_dir)/technical.hh
+# FIXME: Careful, technical-html depends on $(TECHNICAL_HH), not
+# $(TECHNICAL_HTML). Anyway, this shortcut should vanish soon.
technical-html: $(TECHNICAL_HH)
-$(TECHNICAL_HH): $(technical_dir)/technical.tex $(technical_dependencies)
+$(TECHNICAL_HH): $(TECHNICAL_HTML) $(doc_dir)/tools/todoxygen.sh
$(doc_dir)/tools/todoxygen.sh $< $(technical_dir) $(doc_dir)
@@ -334,20 +350,7 @@ EXTRA_DIST += \
$(technical_dir)/technical.tex \
$(TECHNICAL_HH)
-# FIXME: Remove `tutorial.aux tutorial.toc tutorial.log tutorial.bbl
-# tutorial.out *blg *.lot' when texi2dvi is used.
-#
-# FIXME: Why is $(TUTORIAL_PDF) listed here? It should be
-# automatically cleaned. I'm not sure either about the rest of
-# CLEANFILES.
-CLEANFILES += \
- technical.aux technical.toc technical.log technical.bbl technical.out \
- *blg *.lot \
- $(TECHNICAL_PDF) \
- *.haux *.hh *.html *.htoc \
- technical.html \
- technical.idx \
- $(TECHNICAL_HH)
+CLEANFILES += $(technical_dir)/technical.haux
## ---------- ##
@@ -393,6 +396,12 @@ tutorial_dependencies = \
# product is a directory, also refresh a timestamp (in the source
# dir).
+TUTORIAL_HTML = $(tutorial_dir)/tutorial.html
+$(TUTORIAL_HTML): $(tutorial_dir)/tutorial.tex $(tutorial_dependencies)
+ hevea -O -fix $(DOC_PACKAGES) $< -o $@
+
+dist_doc_DATA += $(TUTORIAL_HTML)
+
# Intermediate product for the various doc targets of the parent
# directory.
#
@@ -400,10 +409,13 @@ tutorial_dependencies = \
# since it is later parsed by Doxygen, which complains about `.html'
# files.
TUTORIAL_HH = $(tutorial_dir)/tutorial.hh
+# FIXME: Careful, tutorial-html depends on $(TUTORIAL_HH), not
+# $(TUTORIAL_HTML). Anyway, this shortcut should vanish soon.
tutorial-html: $(TUTORIAL_HH)
-$(TUTORIAL_HH): $(tutorial_dir)/tutorial.tex $(tutorial_dependencies)
+$(TUTORIAL_HH): $(TUTORIAL_HTML) $(doc_dir)/tools/todoxygen.sh
$(doc_dir)/tools/todoxygen.sh $< $(tutorial_dir) $(doc_dir)
+
# Final product.
TUTORIAL_PDF = $(tutorial_dir)/tutorial.pdf
tutorial-pdf: $(TUTORIAL_PDF)
@@ -416,20 +428,10 @@ EXTRA_DIST += \
$(tutorial_dir)/tutorial.tex \
$(TUTORIAL_HH)
-# FIXME: Remove `tutorial.aux tutorial.toc tutorial.log tutorial.bbl
-# tutorial.out *blg *.lot' when texi2dvi is used.
-#
-# FIXME: Why is $(TUTORIAL_PDF) listed here? It should be
+# FIXME: Why is $(TUTORIAL_HH) listed here? It should be
# automatically cleaned. I'm not sure either about the rest of
# CLEANFILES.
-CLEANFILES += \
- tutorial.aux tutorial.toc tutorial.log tutorial.bbl tutorial.out \
- *blg *.lot \
- $(TUTORIAL_PDF) \
- *.haux *.hh *.html *.htoc \
- tutorial.html \
- tutorial.idx \
- $(TUTORIAL_HH)
+CLEANFILES += $(tutorial_dir)/tutorial.haux
## ------------- ##
@@ -505,15 +507,10 @@ EXTRA_DIST += \
$(PNGS) \
$(EPSS)
-# FIXME: Remove unnecessary patterns.
-CLEANFILES += \
- white_paper_image.* \
- white_paper.pdf \
- *.log *.idx *.out *.aux
+CLEANFILES += $(EPSS)
-# FIXME: Likewise.
clean-local:
- -rm -rf white_paper_html figures
+ -rm -rf $(white_paper_dir)/white_paper_html
## ----------------- ##
@@ -559,6 +556,12 @@ ref_guide_dependencies = \
# product is a directory, also refresh a timestamp (in the source
# dir).
+REF_GUIDE_HTML = $(ref_guide_dir)/ref_guide.html
+$(REF_GUIDE_HTML): $(ref_guide_dir)/ref_guide.tex $(ref_guide_dependencies)
+ hevea -O -fix $(DOC_PACKAGES) $< -o $@
+
+dist_doc_DATA += $(REF_GUIDE_HTML)
+
# Intermediate product for the various doc targets of the parent
# directory.
#
@@ -566,8 +569,10 @@ ref_guide_dependencies = \
# since it is later parsed by Doxygen, which complains about `.html'
# files.
REF_GUIDE_HH = $(ref_guide_dir)/ref_guide.hh
+# FIXME: Careful, ref-guide-html depends on $(REF_GUIDE_HH), not
+# $(REF_GUIDE_HTML). Anyway, this shortcut should vanish soon.
ref-guide-html: $(REF_GUIDE_HH)
-$(REF_GUIDE_HH): $(ref_guide_dir)/ref_guide.tex $(ref_guide_dependencies)
+$(REF_GUIDE_HH): $(REF_GUIDE_HTML) $(doc_dir)/tools/todoxygen.sh
$(doc_dir)/tools/todoxygen.sh $< $(ref_guide_dir) $(doc_dir)
@@ -583,20 +588,9 @@ EXTRA_DIST += \
$(ref_guide_dir)/ref_guide.tex \
$(REF_GUIDE_HH)
-# FIXME: Remove `ref_guide.aux ref_guide.toc ref_guide.log
-# ref_guide.bbl ref_guide.out *blg *.lot' when texi2dvi is used.
-#
-# FIXME: Why is $(REF_GUIDE_PDF) listed here? It should be
-# automatically cleaned. I'm not sure either about the rest of
-# CLEANFILES.
-CLEANFILES += \
- ref_guide.aux ref_guide.toc ref_guide.log ref_guide.bbl ref_guide.out \
- *blg *.lot \
- $(REF_GUIDE_PDF) \
- *.haux *.hh *.html *.htoc \
- ref_guide.html \
- ref_guide.idx \
- $(REF_GUIDE_HH)
+CLEANFILES += \
+ $(ref_guide_dir)/ref_guide.haux \
+ $(ref_guide_dir)/ref_guide.htoc
## --------- ##
diff --git a/milena/doc/tools/todoxygen.sh b/milena/doc/tools/todoxygen.sh
index c61f5e1..9acab35 100755
--- a/milena/doc/tools/todoxygen.sh
+++ b/milena/doc/tools/todoxygen.sh
@@ -1,19 +1,28 @@
-#!/bin/sh
-
-if [ $# -ne 3 ]; then
- echo "Usage: $0 <file.tex> <output dir> <path to .sty>";
- exit 1;
-fi
-
-out="$2/`basename $1 .tex`.hh"
-html="$2/`basename $1 .tex`.html"
-
-DOC_PACKAGES="
- $3/doxyfuns.sty \
- $3/milena.sty"
-
-#Convert .tex to .html
-hevea -O -fix $DOC_PACKAGES $1 -o $html
+#! /bin/sh
+
+# todoxygen.sh: Turn HTML files into suitable inputs for doxygen.
+
+# Copyright (C) 2009, 2010 EPITA Research and Development Laboratory (LRDE).
+#
+# This file is part of Olena.
+#
+# Olena is free software: you can redistribute it and/or modify it under
+# the terms of the GNU General Public License as published by the Free
+# Software Foundation, version 2 of the License.
+#
+# Olena is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Olena. If not, see <http://www.gnu.org/licenses/>.
+
+test $# -eq 3 \
+ || { echo "Usage: $0 <file.html> <output dir> <path to .sty>"; exit 1; }
+
+html="$1"
+out="$2/`basename $1 .html`.hh"
bodyl=`grep -n "<BODY >" $html | cut -d ':' -f 1`
nlines=`wc -l $html | cut -d ' ' -f 1`
@@ -32,11 +41,10 @@ sed -i -e 's/“/\&ldquo/g' $out
sed -i -e 's/”/\&rdquo/g' $out
sed -i -e 's/≡/\&equiv/g' $out
-#Doxygen wants us to preserve '\n' after commands.
-#\ref and \see do not need that extra new line.
+# Doxygen wants us to preserve '\n' after commands.
+# \ref and \see do not need that extra new line.
for keyword in include section page subpage subsection subsubsection image; do
sed -i -e "s/\\\\endhtmlonly\\\\$keyword/\\\\endhtmlonly\n\n\\\\$keyword/g" $out
sed -i -e "s/\\\\endhtmlonly\*\//\\\\endhtmlonly\n\n\*\/\n\n/g" $out
sed -i -e "s/\\\\$keyword \(.*\)\\\\htmlonly/\\\\$keyword \1\n\n\\\\htmlonly\n/g" $out
done
-
--
1.5.6.5
* build-aux/texi2dvi: New script.
Imported from Texinfo.
* build-aux/tex.mk: New Makefile helper.
Inspired by LRDE share/style/tex.mk.
* build-aux/Makefile.am (EXTRA_DIST): Add texi2dvi.
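For illustration, the `.tex.pdf' suffix rule defined in tex.mk expands
to roughly this command (`manual.tex' is a placeholder input):

  build-aux/texi2dvi --pdf --tidy --build-dir=tmp.t2d --batch -o manual.pdf manual.tex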
---
ChangeLog | 10 +
build-aux/Makefile.am | 12 +-
build-aux/tex.mk | 40 +
build-aux/texi2dvi | 1959 +++++++++++++++++++++++++++++++++++++++++++++++++
4 files changed, 2017 insertions(+), 4 deletions(-)
create mode 100644 build-aux/tex.mk
create mode 100755 build-aux/texi2dvi
diff --git a/ChangeLog b/ChangeLog
index add6d8f..53ba20c 100644
--- a/ChangeLog
+++ b/ChangeLog
@@ -1,5 +1,15 @@
2010-03-12 Roland Levillain <roland@lrde.epita.fr>
+ Add texi2dvi as well as a Make helper to use it.
+
+ * build-aux/texi2dvi: New script.
+ Imported from Texinfo.
+ * build-aux/tex.mk: New Makefile helper.
+ Inspired by LRDE share/style/tex.mk.
+ * build-aux/Makefile.am (EXTRA_DIST): Add texi2dvi.
+
+2010-03-12 Roland Levillain <roland@lrde.epita.fr>
+
* configure.ac: Configure milena/doc/tools/data.hh.
2010-03-08 Roland Levillain <roland@lrde.epita.fr>
diff --git a/build-aux/Makefile.am b/build-aux/Makefile.am
index 1185eaf..0e1ad0f 100644
--- a/build-aux/Makefile.am
+++ b/build-aux/Makefile.am
@@ -1,4 +1,5 @@
-# Copyright (C) 2006, 2009 EPITA Research and Development Laboratory (LRDE).
+# Copyright (C) 2006, 2009, 2010 EPITA Research and Development
+# Laboratory (LRDE).
#
# This file is part of Olena.
#
@@ -13,7 +14,10 @@
#
# You should have received a copy of the GNU General Public License
# along with Olena. If not, see <http://www.gnu.org/licenses/>.
-#
-## Process this file through Automake to produce Makefile.in.
-EXTRA_DIST = generate_dist_headers.sh build_unit_test.sh
+# texi2dvi: produce DVI (or PDF) files from Texinfo (or (La)TeX) sources.
+# Part of the Texinfo project (http://www.gnu.org/software/texinfo/).
+EXTRA_DIST = texi2dvi
+
+# File generators.
+EXTRA_DIST += generate_dist_headers.sh build_unit_test.sh
diff --git a/build-aux/tex.mk b/build-aux/tex.mk
new file mode 100644
index 0000000..9fa00ff
--- /dev/null
+++ b/build-aux/tex.mk
@@ -0,0 +1,40 @@
+## tex.mk -*- Makefile-Automake -*-
+
+## A small subset of LRDE's share/make/tex.mk.
+
+## Copyright (C) 2010 EPITA Research and Development Laboratory (LRDE).
+##
+## This file is part of Olena.
+##
+## Olena is free software: you can redistribute it and/or modify it under
+## the terms of the GNU General Public License as published by the Free
+## Software Foundation, version 2 of the License.
+##
+## Olena is distributed in the hope that it will be useful,
+## but WITHOUT ANY WARRANTY; without even the implied warranty of
+## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+## General Public License for more details.
+##
+## You should have received a copy of the GNU General Public License
+## along with Olena. If not, see <http://www.gnu.org/licenses/>.
+
+build_aux_dir = $(top_srcdir)/build-aux
+
+TEXI2DVI = $(build_aux_dir)/texi2dvi
+TEXI2DVI_FLAGS = --tidy --build-dir=tmp.t2d --batch
+
+
+TEXI2PDF = $(TEXI2DVI) --pdf
+TEXI2PDF_FLAGS = $(TEXI2DVI_FLAGS)
+
+SUFFIXES = .pdf .tex
+
+.tex.pdf:
+ $(TEXI2PDF) $(TEXI2PDF_FLAGS) -o $@ $<
+
+tex-mostlyclean:
+ rm -rf tmp.t2d
+.PHONY: tex-mostlyclean
+# mostlyclean-local is an Automake special target.
+mostlyclean-local: tex-mostlyclean
+.PHONY: mostlyclean-local
diff --git a/build-aux/texi2dvi b/build-aux/texi2dvi
new file mode 100755
index 0000000..fce73b9
--- /dev/null
+++ b/build-aux/texi2dvi
@@ -0,0 +1,1959 @@
+#! /bin/sh
+# texi2dvi --- produce DVI (or PDF) files from Texinfo (or (La)TeX) sources.
+# $Id: texi2dvi,v 1.157 2010/02/09 18:37:08 karl Exp $
+#
+# Copyright 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2001, 2002, 2003,
+# 2004, 2005, 2006, 2007, 2008, 2009 Free Software Foundation, Inc.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 3 of the License,
+# or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+#
+# Originally written by Noah Friedman.
+#
+# Please send bug reports, etc. to bug-texinfo@gnu.org.
+# If possible, please send a copy of the output of the script called with
+# the `--debug' option when making a bug report.
+
+test -f /bin/ksh && test -z "$RUNNING_KSH" \
+ && { UNAMES=`uname -s`; test "x$UNAMES" = xULTRIX; } 2>/dev/null \
+ && { RUNNING_KSH=true; export RUNNING_KSH; exec /bin/ksh $0 ${1+"$@"}; }
+unset RUNNING_KSH
+
+# No failure shall remain unpunished.
+set -e
+
+# This string is expanded automatically when this file is checked out.
+rcs_revision='$Revision: 1.157 $'
+rcs_version=`set - $rcs_revision; echo $2`
+program=`echo $0 | sed -e 's!.*/!!'`
+
+build_mode=${TEXI2DVI_BUILD_MODE:-local}
+build_dir=${TEXI2DVI_BUILD_DIRECTORY:-.}
+
+# Initialize variables for option overriding and otherwise.
+# Don't use `unset' since old bourne shells don't have this command.
+# Instead, assign them an empty value.
+action=compile
+batch=false # interact normally
+catcode_special=maybe
+debug=false
+escape="\\"
+expand=false # true for expansion via makeinfo
+includes=
+line_error=true # pass --file-line-error to TeX
+max_iters=-1 # keep going forever
+oname= # --output
+out_lang=dvi
+quiet=false # let the tools' message be displayed
+recode=false
+set_language=
+src_specials=
+latex2html=hevea # or set to tex4ht
+textra= # Extra TeX commands to insert in the input file.
+txiprereq=19990129 # minimum texinfo.tex version with macro expansion
+verb=false # true for verbose mode
+translate_file= # name of charset translation file
+recode_from= # if not empty, recode from this encoding to @documentencoding
+
+orig_pwd=`pwd`
+
+# We have to initialize IFS to space tab newline since we save and
+# restore IFS and apparently POSIX allows stupid/broken behavior with
+# empty-but-set IFS.
+# http://lists.gnu.org/archive/html/automake-patches/2006-05/msg00008.html
+# We need space, tab and new line, in precisely that order. And don't leave
+# trailing blanks.
+space=' '
+tab=' '
+newline='
+'
+IFS="$space$tab$newline"
+
+# In case someone pedantic insists on using grep -E.
+: ${EGREP=egrep}
+
+# Systems which define $COMSPEC or $ComSpec use semicolons to separate
+# directories in TEXINPUTS -- except for Cygwin et al., where COMSPEC
+# might be inherited, but : is used.
+if test -n "$COMSPEC$ComSpec" \
+ && uname | $EGREP -iv 'cygwin|mingw|djgpp' >/dev/null; then
+ path_sep=";"
+else
+ path_sep=":"
+fi
+
+# Pacify verbose cds.
+CDPATH=${ZSH_VERSION+.}$path_sep
+
+# If $TEX is set to a directory, don't use it.
+test -n "$TEX" && test -d "$TEX" && unset TEX
+
+#
+## --------------------- ##
+## Auxiliary functions. ##
+## --------------------- ##
+
+# In case `local' is not supported by the shell, provide a function
+# that simulates it by simply performing the assignments. This means
+# that we must not expect `local' to work, i.e., we must not (i) rely
+# on it during recursion, and (ii) have two local declarations of the
+# same variable. (ii) is easy to check statically, and our test suite
+# does make sure there is never twice a static local declaration of a
+# variable. (i) cannot be checked easily, so just be careful.
+#
+# Note that since we might use a function simulating `local', we can
+# no longer rely on the fact that no IFS-splitting is performed. So,
+# while
+#
+# foo=$bar
+#
+# is fine (no IFS-splitting), never write
+#
+# local foo=$bar
+#
+# but rather
+#
+# local foo="$bar"
+(
+ foo=bar
+ test_local () {
+ local foo=foo
+ }
+ test_local
+ test $foo = bar
+) || local () {
+ case $1 in
+ *=*) eval "$1";;
+ esac
+}
+
+
+# cd_orig
+# -------
+# Return to the original directory.
+cd_orig ()
+{
+ # In case $orig_pwd is on a different drive (for DOS).
+ cd /
+
+ # Return to the original directory so that
+ # - the next file is processed in correct conditions
+ # - the temporary file can be removed
+ cd "$orig_pwd" || exit 1
+}
+
+# func_dirname FILE
+# -----------------
+# Return the directory part of FILE.
+func_dirname ()
+{
+ dirname "$1" 2>/dev/null \
+ || { echo "$1" | sed 's!/[^/]*$!!;s!^$!.!'; }
+}
+
+
+# noext FILE
+# ----------
+# Return FILE with one extension removed. foo.bar.baz -> foo.bar.
+noext ()
+{
+ echo "$1" | sed -e 's/\.[^/.][^/.]*$//'
+}
+
+
+# absolute NAME -> ABS-NAME
+# -------------------------
+# Return an absolute path to NAME.
+absolute ()
+{
+ case $1 in
+ [\\/]* | ?:[\\/]*)
+ # Absolute paths don't need to be expanded.
+ echo "$1"
+ ;;
+ *) local slashes
+ slashes=`echo "$1" | sed -n 's,.*[^/]\(/*\)$,\1,p'`
+ local rel
+ rel=$orig_pwd/`func_dirname "$1"`
+ if test -d "$rel"; then
+ (cd "$rel" 2>/dev/null &&
+ local n
+ n=`pwd`/`basename "$1"`"$slashes"
+ echo "$n")
+ else
+ error 1 "not a directory: $rel"
+ fi
+ ;;
+ esac
+}
+
+
+# ensure_dir DIR1 DIR2...
+# -----------------------
+# Make sure the directories exist.
+ensure_dir ()
+{
+ for dir
+ do
+ # Beware that in parallel builds we may have several concurrent
+ # attempts to create the directory. So fail only if "mkdir"
+ # failed *and* the directory still does not exist.
+ test -d "$dir" \
+ || mkdir "$dir" \
+ || test -d "$dir" \
+ || error 1 "cannot create directory: $dir"
+ done
+}
+
+
+# error EXIT_STATUS LINE1 LINE2...
+# --------------------------------
+# Report an error and exit with failure if EXIT_STATUS is non null.
+error ()
+{
+ local s="$1"
+ shift
+ report "$@"
+ if test "$s" != 0; then
+ exit $s
+ fi
+}
+
+
+# findprog PROG
+# -------------
+# Return true if PROG is somewhere in PATH, else false.
+findprog ()
+{
+ local saveIFS="$IFS"
+ IFS=$path_sep # break path components at the path separator
+ for dir in $PATH; do
+ IFS=$saveIFS
+ # The basic test for an executable is `test -f $f && test -x $f'.
+ # (`test -x' is not enough, because it can also be true for directories.)
+ # We have to try this both for $1 and $1.exe.
+ #
+ # Note: On Cygwin and DJGPP, `test -x' also looks for .exe. On Cygwin,
+ # also `test -f' has this enhancement, but not on DJGPP. (Both are
+ # design decisions, so there is little chance to make them consistent.)
+ # Thusly, it seems to be difficult to make use of these enhancements.
+ #
+ if { test -f "$dir/$1" && test -x "$dir/$1"; } ||
+ { test -f "$dir/$1.exe" && test -x "$dir/$1.exe"; }; then
+ return 0
+ fi
+ done
+ return 1
+}
+
+# report LINE1 LINE2...
+# ---------------------
+# Report some information on stderr.
+report ()
+{
+ for i in "$@"
+ do
+ echo >&2 "$0: $i"
+ done
+}
+
+
+# run COMMAND-LINE
+# ----------------
+# Run the COMMAND-LINE verbosely, and catching errors as failures.
+run ()
+{
+ verbose "Running $@"
+ "$@" 2>&5 1>&2 ||
+ error 1 "$1 failed"
+}
+
+
+# usage
+# -----
+# Display usage and exit successfully.
+usage ()
+{
+ # We used to simply have `echo "$usage"', but coping with the
+ # changing behavior of `echo' is much harder than simply using a
+ # here-doc.
+ #
+ # echo '\noto' echo '\\noto' echo -e '\\noto'
+ # bash 3.1 \noto \\noto \noto
+ # bash 3.2 %oto \noto -e \noto
+ #
+ # where % denotes the eol character.
+ cat <<EOF
+Usage: $program [OPTION]... FILE...
+ texi2pdf [OPTION]... FILE...
+ pdftexi2dvi [OPTION]... FILE...
+
+Run each Texinfo or (La)TeX FILE through TeX in turn until all
+cross-references are resolved, building all indices. The directory
+containing each FILE is searched for included files. The suffix of FILE
+is used to determine its language ((La)TeX or Texinfo). To process
+(e)plain TeX files, set the environment variable LATEX=tex.
+
+In order to make texi2dvi a drop-in replacement of TeX/LaTeX in AUC-TeX,
+the FILE may also be composed of the following simple TeX commands.
+ \`\\input{FILE}' the actual file to compile
+ \`\\nonstopmode' same as --batch
+
+When invoked as \`texi2pdf' or \`pdftexi2dvi', or given the option --pdf
+or --dvipdf, generate PDF output. Otherwise, generate DVI.
+
+General options:
+ -b, --batch no interaction
+ -D, --debug turn on shell debugging (set -x)
+ -h, --help display this help and exit successfully
+ -o, --output=OFILE leave output in OFILE (implies --clean);
+ only one input FILE may be specified in this case
+ -q, --quiet no output unless errors (implies --batch)
+ -s, --silent same as --quiet
+ -v, --version display version information and exit successfully
+ -V, --verbose report on what is done
+
+TeX tuning:
+ -@ use @input instead of \input for preloaded Texinfo
+ -e, -E, --expand force macro expansion using makeinfo
+ -I DIR search DIR for Texinfo files
+ -l, --language=LANG specify LANG for FILE, either latex or texinfo
+ --no-line-error do not pass --file-line-error to TeX
+ -r, --recode call recode before TeX to translate input
+ --recode-from=ENC recode from ENC to the @documentencoding
+ --src-specials pass --src-specials to TeX
+ -t, --command=CMD insert CMD in copy of input file
+ or --texinfo=CMD multiple values accumulate
+ --translate-file=FILE use given charset translation file for TeX
+
+Output format:
+ --dvi output a DVI file [default]
+ --dvipdf output a PDF file via DVI (using dvipdf)
+ --html output an HTML file. Use HeVeA for LaTeX files
+ --info output an Info file. Use HeVeA for LaTeX files
+ -p, --pdf use pdftex or pdflatex for processing
+ --ps output a PDF file via DVI (using dvips)
+ --text output a plain text file. Use HeVeA for LaTeX files
+
+Build modes:
+ --build=MODE specify the treatment of auxiliary files [$build_mode]
+ --tidy same as --build=tidy
+ -c, --clean same as --build=clean
+ --build-dir=DIR specify where the tidy compilation is performed;
+ implies --tidy;
+ defaults to TEXI2DVI_BUILD_DIRECTORY [$build_dir]
+ --mostly-clean remove the auxiliary files and directories
+ but not the output
+ --max-iterations=N don't process files more than N times
+
+The MODE specifies where the TeX compilation takes place, and, as a
+consequence, how auxiliary files are treated. The build mode
+can also be set using the environment variable TEXI2DVI_BUILD_MODE.
+
+Valid MODEs are:
+ \`local' compile in the current directory, leaving all the auxiliary
+ files around. This is the traditional TeX use.
+ \`tidy' compile in a local *.t2d directory, where the auxiliary files
+ are left. Output files are copied back to the original file.
+ \`clean' same as \`tidy', but remove the auxiliary directory afterwards.
+ Every compilation therefore requires the full cycle.
+
+Using the \`tidy' mode brings several advantages:
+ - the current directory is not cluttered with plethora of temporary files.
+ - clutter can be even further reduced using --build-dir=dir: all the *.t2d
+ directories are stored there.
+ - clutter can be reduced to zero using, e.g., --build-dir=/tmp/\$USER.t2d
+ or --build-dir=\$HOME/.t2d.
+ - the output file is updated after every successful TeX run, for
+ sake of concurrent visualization of the output. In a \`local' build
+ the viewer stops during the whole TeX run.
+ - if the compilation fails, the previous state of the output file
+ is preserved.
+ - PDF and DVI compilation are kept in separate subdirectories
+ preventing any possibility of auxiliary file incompatibility.
+
+On the other hand, because \`tidy' compilation takes place in another
+directory, occasionally TeX won't be able to find some files (e.g., when
+using \\graphicspath): in that case use -I to specify the additional
+directories to consider.
+
+The values of the BIBTEX, DVIPDF, DVIPS, LATEX, MAKEINDEX, MAKEINFO,
+PDFLATEX, PDFTEX, TEX, TEXINDEX, and THUMBPDF environment variables are used
+to run those commands, if they are set. Any CMD strings are added after
+@setfilename for Texinfo input, in the first line for LaTeX input.
+
+Report bugs to bug-texinfo@gnu.org,
+general questions and discussion to help-texinfo@gnu.org.
+GNU Texinfo home page: <http://www.gnu.org/software/texinfo/>
+General help using GNU software: <http://www.gnu.org/gethelp/>
+EOF
+ exit 0
+}
+
+
+# verbose WORD1 WORD2
+# -------------------
+# Report some verbose information.
+verbose ()
+{
+ if $verb; then
+ echo >&2 "$0: $@"
+ fi
+}
+
+
+# version
+# -------
+# Display version info and exit succesfully.
+version ()
+{
+ cat <<EOF
+texi2dvi (GNU Texinfo 4.13) $rcs_version
+
+Copyright (C) 2008 Free Software Foundation, Inc.
+License GPLv3+: GNU GPL version 3 or later <http://gnu.org/licenses/gpl.html>
+This is free software: you are free to change and redistribute it.
+There is NO WARRANTY, to the extent permitted by law.
+EOF
+ exit 0
+}
+
+
+## ---------------- ##
+## Handling lists. ##
+## ---------------- ##
+
+
+# list_append LIST-NAME ELEM
+# --------------------------
+# Set LIST-NAME to its former contents, with ELEM appended.
+list_append ()
+{
+ local la_l="$1"
+ shift
+ eval set X \$$la_l "$@"
+ shift
+ eval $la_l=\""$@"\"
+}
+
+
+# list_concat_dirs LIST-NAME DIR-LIST
+# -----------------------------------
+# Append to LIST-NAME all the components (included empty) from
+# the $path_sep separated list DIR-LIST. Make the paths absolute.
+list_concat_dirs ()
+{
+ local lcd_list="$1"
+ # Empty path components are meaningful to tex. We rewrite them as
+ # `EMPTY' so they don't get lost when we split on $path_sep.
+ # Hopefully no one will have an actual directory named EMPTY.
+ local replace_EMPTY="-e 's/^$path_sep/EMPTY$path_sep/g' \
+ -e 's/$path_sep\$/${path_sep}EMPTY/g' \
+ -e 's/$path_sep$path_sep/${path_sep}EMPTY:/g'"
+ save_IFS=$IFS
+ IFS=$path_sep
+ set x `echo "$2" | eval sed $replace_EMPTY`; shift
+ IFS=$save_IFS
+ local dir
+ for dir
+ do
+ case $dir in
+ EMPTY)
+ list_append $lcd_list ""
+ ;;
+ *)
+ if test -d $dir; then
+ dir=`absolute "$dir"`
+ list_append $lcd_list "$dir"
+ fi
+ ;;
+ esac
+ done
+}
+
+
+# list_prefix LIST-NAME SEP -> STRING
+# -----------------------------------
+# Return a string that is composed of the LIST-NAME with each item
+# preceded by SEP.
+list_prefix ()
+{
+ local lp_p="$2"
+ eval set X \$$1
+ shift
+ local lp_res
+ for i
+ do
+ lp_res="$lp_res \"$lp_p\" \"$i\""
+ done
+ echo "$lp_res"
+}
+
+# list_infix LIST-NAME SEP -> STRING
+# ----------------------------------
+# Same as list_prefix, but a separator.
+list_infix ()
+{
+ eval set X \$$1
+ shift
+ local la_IFS="$IFS"
+ IFS=$path_sep
+ echo "$*"
+ IFS=$la_IFS
+}
+
+# list_dir_to_abs LIST-NAME
+# -------------------------
+# Convert the list to using only absolute dir names.
+# Currently unused, but should replace absolute_filenames some day.
+list_dir_to_abs ()
+{
+ local ld_l="$1"
+ eval set X \$$ld_l
+ shift
+ local ld_res
+ for dir
+ do
+ dir=`absolute "$dir"`
+ test -d "$dir" || continue
+ ld_res="$ld_res \"$dir\""
+ done
+ set X $ld_res; shift
+ eval $ld_l=\"$@\"
+}
+
+
+## ------------------------------ ##
+## Language auxiliary functions. ##
+## ------------------------------ ##
+
+
+# out_lang_set LANG
+# -----------------
+out_lang_set ()
+{
+ case $1 in
+ dvi|dvipdf|html|info|pdf|ps|text) out_lang=$1;;
+ *) error 1 "invalid output format: $1";;
+ esac
+}
+
+
+# out_lang_tex
+# ------------
+# Return the tex output language (DVI or PDF) for $OUT_LANG.
+out_lang_tex ()
+{
+ case $out_lang in
+ dvi | ps | dvipdf ) echo dvi;;
+ pdf ) echo $out_lang;;
+ html | info | text ) echo $out_lang;;
+ *) error 1 "invalid out_lang: $1";;
+ esac
+}
+
+
+# out_lang_ext
+# ------------
+# Return the extension for $OUT_LANG.
+out_lang_ext ()
+{
+ case $out_lang in
+ dvipdf ) echo pdf;;
+ dvi | html | info | pdf | ps | text ) echo $out_lang;;
+ *) error 1 "invalid out_lang: $1";;
+ esac
+}
+
+
+## ------------------------- ##
+## TeX auxiliary functions. ##
+## ------------------------- ##
+
+# Save TEXINPUTS so we can construct a new TEXINPUTS path for each file.
+# Likewise for bibtex and makeindex.
+tex_envvars="BIBINPUTS BSTINPUTS DVIPSHEADERS INDEXSTYLE MFINPUTS MPINPUTS \
+TEXINPUTS TFMFONTS"
+for var in $tex_envvars; do
+ eval ${var}_orig=\$$var
+ export $var
+done
+
+
+# absolute_filenames TEX-PATH -> TEX-PATH
+# ---------------------------------------
+# Convert relative paths to absolute paths, so we can run in another
+# directory (e.g., in tidy build mode, or during the macro-support
+# detection). Prepend ".".
+absolute_filenames ()
+{
+ # Empty path components are meaningful to tex. We rewrite them as
+ # `EMPTY' so they don't get lost when we split on $path_sep.
+ # Hopefully no one will have an actual directory named EMPTY.
+ local replace_empty="-e 's/^$path_sep/EMPTY$path_sep/g' \
+ -e 's/$path_sep\$/${path_sep}EMPTY/g' \
+ -e 's/$path_sep$path_sep/${path_sep}EMPTY:/g'"
+ local res
+ res=`echo "$1" | eval sed $replace_empty`
+ save_IFS=$IFS
+ IFS=$path_sep
+ set x $res; shift
+ res=.
+ for dir
+ do
+ case $dir in
+ EMPTY)
+ res=$res$path_sep
+ ;;
+ *)
+ if test -d "$dir"; then
+ res=$res$path_sep`absolute "$dir"`
+ else
+ # Even if $dir is not a directory, preserve it in the path.
+ # It might contain metacharacters that TeX will expand in
+ # turn, e.g., /some/path/{a,b,c}. This will not get the
+ # implicit absolutification of the path, but we can't help that.
+ res=$res$path_sep$dir
+ fi
+ ;;
+ esac
+ done
+ echo "$res"
+}
+
+
+# output_base_name FILE
+# ---------------------
+# The name of FILE, possibly renamed to satisfy --output.
+# FILE is local, there is no directory part.
+output_base_name ()
+{
+ case $oname in
+ '') echo "$1";;
+ *) local out_noext
+ out_noext=`noext "$oname"`
+ local file_ext
+ file_ext=`echo "$1" | sed 's/^.*\.//'`
+ echo "$out_noext.$file_ext"
+ ;;
+ esac
+}
+
+
+# destdir
+# -------
+# Return the name of the directory where the output is expected.
+destdir ()
+{
+ case $oname in
+ '') echo "$orig_pwd";;
+ *) dirname "$oname";;
+ esac
+}
+
+
+# move_to_dest FILE...
+# --------------------
+# Move FILE to the place where the user expects it. Truly move it, that
+# is, it must not remain in its build location unless that is also the
+# output location. (Otherwise it might appear as an extra file in make
+# distcheck.)
+#
+# FILE can be the principal output (in which case -o directly applies), or
+# an auxiliary file with the same base name.
+move_to_dest ()
+{
+ # If we built in place, there is nothing to install, leave.
+ case $tidy:$oname in
+ false:) return;;
+ esac
+
+ local destfile
+ local destdir
+ local destbase
+ local sourcedir
+ local sourcebase
+
+ for file
+ do
+ test -f "$file" ||
+ error 1 "no such file or directory: $file"
+ case $tidy:$oname in
+ true:) destdir=$orig_pwd
+ destfile=$destdir/$file;;
+ *:*) destfile=`output_base_name "$file"`
+ destdir=`dirname "$destfile"`;;
+ esac
+ # We want to compare the source location and the output location,
+ # and if they are different, do the move. But if they are the
+ # same, we must preserve the source. Since we can't assume
+ # stat(1) or test -ef is available, resort to comparing the
+ # directory names, canonicalized with pwd. We can't use cmp -s
+ # since the output file might not actually change from run to run;
+ # e.g., TeX DVI output is timestamped to only the nearest minute.
+ destdir=`cd "$destdir" && pwd`
+ destbase=`basename "$destfile"`
+
+ sourcedir=`dirname "$file"`
+ sourcedir=`cd "$sourcedir" && pwd`
+ sourcebase=`basename "$file"`
+
+ if test "$sourcedir/$sourcebase" != "$destdir/$destbase"; then
+ verbose "Moving $file to $destfile"
+ rm -f "$destfile"
+ mv "$file" "$destfile"
+ fi
+ done
+}
+
+
+## --------------------- ##
+## Managing xref files. ##
+## --------------------- ##
+
+# aux_file_p FILE
+# ---------------
+# Return with success if FILE is an aux file.
+aux_file_p ()
+{
+ test -f "$1" || return 1
+ case $1 in
+ *.aux) return 0;;
+ *) return 1;;
+ esac
+}
+
+# bibaux_file_p FILE
+# ------------------
+# Return with success if FILE is an aux file containing citation
+# requests.
+bibaux_file_p ()
+{
+ test -s "$1" || return 1
+ if (grep '^\\bibstyle[{]' "$1" \
+ && grep '^\\bibdata[{]' "$1" \
+ ## The following line is suspicious: fails when there
+ ## are citations in sub aux files. We need to be
+ ## smarter in this case.
+ ## && grep '^\\citation[{]' "$f"
+ ) >&6 2>&1;
+ then
+ return 0
+ fi
+ return 1
+}
+
+# index_file_p FILE
+# -----------------
+# Return with success if FILE is an index file.
+index_file_p ()
+{
+ test -f "$1" || return 1
+ case $in_lang:$latex2html:`out_lang_tex`:`sed '1q' "$1"` in
+ # When working with TeX4HT, *.idx are created by LaTeX. They must
+ # be processed to produce *.4ix, *.4dx files. The *.4dx file is
+ # passed to makeindex to produce the *.ind file. This sequence is
+ # handled by run_index, so we are only interested in the *.idx
+ # files, which have each "\indexentry" preceded by a
+ # "\beforeentry".
+ latex:tex4ht:html:"\\beforeentry {"*) return 0;;
+
+ # When index.sty is used, there is a space before the brace.
+ latex:*:*:"\\indexentry{"*|latex:*:*:"\\indexentry {"*) return 0;;
+
+ texinfo:*:*:"\\entry{"*) return 0;;
+
+ *) return 1;;
+ esac
+}
+
+# xref_file_p FILE
+# ----------------
+# Return with success if FILE is an xref file (indexes, tables and lists).
+xref_file_p ()
+{
+ test -f "$1" || return 1
+ # If the file is not suitable to be an index or xref file, don't
+ # process it. It's suitable if the first character is a
+ # backslash or right quote or at, as long as the first line isn't
+ # \input texinfo.
+ case `sed '1q' "$1"` in
+ "\\input texinfo"*) return 1;;
+ [\\''@]*) return 0;;
+ *) return 1;;
+ esac
+}
+
+
+# generated_files_get FILENAME-NOEXT [PREDICATE-FILTER]
+# -----------------------------------------------------
+# Return the list of files generated by the TeX compilation of FILENAME-NOEXT.
+generated_files_get ()
+{
+ local filter=true
+ if test -n "$2"; then
+ filter=$2
+ fi
+
+ # Gather the files created by TeX.
+ (
+ if test -f "$1.log"; then
+ sed -n -e "s,^\\\\openout.* = \`\\(.*\\)'\\.,\\1,p" "$1.log"
+ fi
+ echo "$1.log"
+ ) |
+ # Depending on these files, infer outputs from other tools.
+ while read file; do
+ echo $file
+ case $in_lang in
+ texinfo)
+ # texindex: texinfo.cp -> texinfo.cps
+ if index_file_p $file; then
+ echo ${file}s
+ fi
+ ;;
+ latex)
+ if aux_file_p $file; then
+ # bibtex: *.aux -> *.bbl and *.blg.
+ echo $file | sed 's/^\(.*\)\.aux$/\1.bbl/'
+ echo $file | sed 's/^\(.*\)\.aux$/\1.blg/'
+ # -recorder: .fls
+ echo $file | sed 's/^\(.*\)\.aux$/\1.fls/'
+ fi
+ ;;
+ esac
+ done |
+ # Filter existing files matching the criterion.
+ #
+ # With an input file name containing a space, this produces a
+ # "command not found" message (and filtering is ineffective).
+ # The situation with a newline is presumably even worse.
+ while read file; do
+ if $filter "$file"; then
+ echo $file
+ fi
+ done |
+ sort |
+ # Some files are opened several times, e.g., listings.sty's *.vrb.
+ uniq
+}
+
+
+# xref_files_save
+# ---------------
+# Save the xref files.
+xref_files_save ()
+{
+ # Save copies of auxiliary files for later comparison.
+ xref_files_orig=`generated_files_get "$in_noext" xref_file_p`
+ if test -n "$xref_files_orig"; then
+ verbose "Backing up xref files: $xref_files_orig"
+ # The following line improves `cp $xref_files_orig "$work_bak"'
+ # by preserving the directory parts. Think of
+ # cp chap1/main.aux chap2/main.aux $work_bak.
+ #
+ # Users may have, e.g., --keep-old-files. Don't let this interfere.
+ # (Don't use unset for the sake of ancient shells.)
+ TAR_OPTIONS=; export TAR_OPTIONS
+ tar cf - $xref_files_orig | (cd "$work_bak" && tar xf -)
+ fi
+}
+
+
+# xref_files_changed
+# ------------------
+# Whether the xref files were changed since the previous run.
+xref_files_changed ()
+{
+ # LaTeX (and the package changebar) report in the LOG file if it
+ # should be rerun. This is needed for files included from
+ # subdirs, since texi2dvi does not try to compare xref files in
+ # subdirs. Performing xref files test is still good since LaTeX
+ # does not report changes in xref files.
+ if grep "Rerun to get" "$in_noext.log" >&6 2>&1; then
+ return 0
+ fi
+
+ # If old and new lists don't at least have the same file list,
+ # then one file or another has definitely changed.
+ xref_files_new=`generated_files_get "$in_noext" xref_file_p`
+ verbose "Original xref files = $xref_files_orig"
+ verbose "New xref files = $xref_files_new"
+ if test "x$xref_files_orig" != "x$xref_files_new"; then
+ return 0
+ fi
+
+ # Compare each file until we find a difference.
+ for this_file in $xref_files_new; do
+ verbose "Comparing xref file `echo $this_file | sed 's|\./||g'` ..."
+ # cmp -s returns nonzero exit status if files differ.
+ if cmp -s "$this_file" "$work_bak/$this_file"; then :; else
+ verbose "xref file `echo $this_file | sed 's|\./||g'` differed ..."
+ if $debug; then
+ diff -u "$work_bak/$this_file" "$this_file"
+ fi
+ return 0
+ fi
+ done
+
+ # No change.
+ return 1
+}
+
+
+
+## ----------------------- ##
+## Running the TeX suite. ##
+## ----------------------- ##
+
+
+
+# run_tex ()
+# ----------
+# Run TeX as "$tex $in_input", taking care of errors and logs.
+run_tex ()
+{
+ case $in_lang:$latex2html:`out_lang_tex` in
+ latex:*:dvi|latex:tex4ht:html)
+ tex=${LATEX:-latex};;
+ latex:*:pdf)
+ tex=${PDFLATEX:-pdflatex};;
+ texinfo:*:dvi)
+ # MetaPost also uses the TEX environment variable. If the user
+ # has set TEX=latex for that reason, don't bomb out.
+ case $TEX in
+ *latex) tex=tex;; # don't bother trying to find etex
+ *) tex=$TEX
+ esac;;
+ texinfo:*:pdf) tex=$PDFTEX;;
+
+ *) error 1 "$out_lang not supported for $in_lang";;
+ esac
+
+ # do the special catcode trick for ~ in filenames only for Texinfo,
+ # not LaTeX.
+ if test x"$in_lang" = xtexinfo && test $catcode_special = maybe; then
+ catcode_special=true
+ else
+ catcode_special=false
+ fi
+
+ # Beware of aux files in subdirectories that require the
+ # subdirectory to exist.
+ case $in_lang:$tidy in
+ latex:true)
+ sed -n 's|^[ ]*\\include{\(.*\)/.*}.*|\1|p' "$in_input" |
+ sort -u |
+ while read d
+ do
+ ensure_dir "$work_build/$d"
+ done
+ ;;
+ esac
+
+ # Note that this will be used via an eval: quote properly.
+ local cmd="$tex"
+
+ # If possible, make TeX report error locations in GNU format.
+ if $line_error; then
+ if test "${tex_help:+set}" != set; then
+ # Go to a temporary directory to try --help, since old versions that
+ # don't accept --help will generate a texput.log.
+ tex_help_dir=$t2ddir/tex_help
+ ensure_dir "$tex_help_dir"
+ tex_help=`cd "$tex_help_dir" >&6 && $tex --help </dev/null 2>&1 || true`
+ fi
+ # The mk program and perhaps others want to parse TeX's
+ # original error messages.
+ case $tex_help in
+ *file-line-error*) cmd="$cmd --file-line-error";;
+ esac
+ fi
+
+ # Tell TeX about TCX file, if specified.
+ test -n "$translate_file" && cmd="$cmd --translate-file=$translate_file"
+
+ # Tell TeX to make source specials (for backtracking from output to
+ # source, given a sufficiently smart editor), if specified.
+ test -n "$src_specials" && cmd="$cmd $src_specials"
+
+ # Tell TeX to be batch if requested.
+ if $batch; then
+ # \batchmode does not show terminal output at all, so we don't
+ # want that. And even in batch mode, TeX insists on having input
+ # from the user. Close its stdin to make it impossible.
+ cmd="$cmd </dev/null '${escape}nonstopmode'"
+ fi
+
+ # we'd like to handle arbitrary input file names, especially
+ # foo~bar/a~b.tex, since Debian likes ~ characters.
+ if $catcode_special; then
+ # $normaltilde is just to reduce line length in this source file.
+ # The idea is to define \normaltilde as a catcode other ~ character,
+ # then make the active ~ be equivalent to that, instead of the plain
+ # TeX tie. Then when the active ~ appears in the filename, it will
+ # be expanded to itself, as far as \input will see. (This is the
+ # same thing that texinfo.tex does in general, BTW.)
+ normaltilde="${escape}catcode126=12 ${escape}def${escape}normaltilde{~}"
+ cmd="$cmd '$normaltilde${escape}catcode126=13 ${escape}let~\normaltilde '"
+ fi
+ # Other special (non-active) characters could be supported by
+ # resetting their catcodes to other on the command line and changing
+ # texinfo.tex to initialize everything to plain catcodes. Maybe someday.
+
+ # append the \input command.
+ cmd="$cmd '${escape}input'"
+
+ # TeX's \input does not (easily or reliably) support whitespace
+ # characters or other special characters in file names. Our intensive
+ # use of absolute file names makes this worse: the enclosing directory
+ # names may include white spaces. Improve the situation using a
+ # symbolic link to the filename in the current directory, in tidy mode
+ # only. Do not alter in_input.
+ #
+ # The filename is almost always tokenized using plain TeX conventions
+ # (the exception would be if the user made a texinfo.fmt file). Not
+ # all the plain TeX special characters cause trouble, but there's no
+ # harm in making the link.
+ #
+ case $tidy:`func_dirname "$in_input"` in
+ true:*["$space$tab$newline\"#\$%\\^_{}~"]*)
+ _run_tex_file_name=`basename "$in_input"`
+ if test ! -f "$_run_tex_file_name"; then
+ # It might not be a file, clear it.
+ run rm -f "$_run_tex_file_name"
+ run ln -s "$in_input"
+ fi
+ cmd="$cmd '$_run_tex_file_name'"
+ ;;
+
+ *)
+ cmd="$cmd '$in_input'"
+ ;;
+ esac
+
+ verbose "$0: Running $cmd ..."
+ if eval "$cmd" >&5; then
+ case $out_lang in
+ dvi | pdf ) move_to_dest "$in_noext.$out_lang";;
+ esac
+ else
+ error 1 "$tex exited with bad status, quitting."
+ fi
+}
+
+# run_bibtex ()
+# -------------
+# Run bibtex on current file.
+# - If its input (AUX) exists.
+# - If some citations are missing (LOG contains `Citation').
+# or the LOG complains of a missing .bbl
+#
+# Don't try to be too smart:
+#
+# 1. Running bibtex only if the bbl file exists and is older than
+# the LaTeX file is wrong, since the document might include files
+# that have changed.
+#
+# 3. Because there can be several AUX (if there are \include's),
+# but a single LOG, looking for missing citations in LOG is
+# easier, though we take the risk to match false messages.
+run_bibtex ()
+{
+ case $in_lang in
+ latex) bibtex=${BIBTEX:-bibtex};;
+ texinfo) return;;
+ esac
+
+ # "Citation undefined" is for LaTeX, "Undefined citation" for btxmac.tex.
+ # The no .aux && \bibdata test is also for btxmac, in case it was the
+ # first run of a bibtex-using document. Otherwise, it's possible that
+ # bibtex would never be run.
+ if test -r "$in_noext.aux" \
+ && test -r "$in_noext.log" \
+ && ((grep 'Warning:.*Citation.*undefined' "$in_noext.log" \
+ || grep '.*Undefined citation' "$in_noext.log" \
+ || grep 'No file .*\.bbl\.' "$in_noext.log") \
+ || (grep 'No \.aux file' "$in_noext.log" \
+ && grep '^\\bibdata' "$in_noext.aux")) \
+ >&6 2>&1; \
+ then
+ for f in `generated_files_get "$in_noext" bibaux_file_p`
+ do
+ run $bibtex "$f"
+ done
+ fi
+}
+
+# run_index ()
+# ------------
+# Run texindex (or makeindex or texindy) on current index files. If
+# they already exist, and after running TeX a first time the index
+# files don't change, then there's no reason to run TeX again. But we
+# won't know that if the index files are out of date or nonexistent.
+run_index ()
+{
+ local index_files=`generated_files_get $in_noext index_file_p`
+ test -n "$index_files" ||
+ return 0
+
+ : ${MAKEINDEX:=makeindex}
+ : ${TEXINDEX:=texindex}
+ : ${TEXINDY:=texindy}
+
+ local index_file
+ local index_noext
+ case $in_lang:$latex2html:`out_lang_tex` in
+ latex:tex4ht:html)
+ for index_file in $index_files
+ do
+ index_noext=`noext "$index_file"`
+ run tex \
+ '\def\filename{{'"$index_noext"'}{idx}{4dx}{ind}}
+ \input idxmake.4ht'
+ run $MAKEINDEX -o $index_noext.ind $index_noext.4dx
+ done
+ ;;
+
+ latex:*)
+ if $TEXINDY --version >&6 2>&1; then
+ run $TEXINDY $index_files
+ else
+ run $MAKEINDEX $index_files
+ fi
+ ;;
+
+ texinfo:*)
+ run $TEXINDEX $index_files
+ ;;
+ esac
+}
+
+
+# run_tex4ht ()
+# -------------
+# Run the last two phases of TeX4HT: tex4ht extracts the HTML from the
+# instrumented DVI file, and t4ht converts the figures and installs
+# the files when given -d.
+#
+# Because knowing exactly which files are created is complex (in
+# addition the names are not simple to compute), which makes it
+# difficult to install the output files in a second step, it is much
+# simpler to install directly the output files.
+run_tex4ht ()
+{
+ case $in_lang:$latex2html:`out_lang_tex` in
+ latex:tex4ht:html)
+ : ${TEX4HT=tex4ht} ${T4HT=t4ht}
+ run "$TEX4HT" "-f/$in_noext"
+ # Do not remove the / after the destdir.
+ run "$T4HT" "-d`destdir`/" "-f/$in_noext"
+ ;;
+ esac
+}
+
+
+# run_thumbpdf ()
+# ---------------
+run_thumbpdf ()
+{
+ if test `out_lang_tex` = pdf \
+ && test -r "$in_noext.log" \
+ && grep 'thumbpdf\.sty' "$in_noext.log" >&6 2>&1; \
+ then
+ thumbpdf=${THUMBPDF:-thumbpdf}
+ thumbcmd="$thumbpdf $in_dir/$in_noext"
+ verbose "Running $thumbcmd ..."
+ if $thumbcmd >&5; then
+ run_tex
+ else
+ report "$thumbpdf exited with bad status." \
+ "Ignoring its output."
+ fi
+ fi
+}
+
+
+# run_dvipdf FILE.dvi
+# -------------------
+# Convert FILE.dvi to FILE.pdf.
+run_dvipdf ()
+{
+ # Find which dvi->pdf program is available.
+ if test -z "$dvipdf"; then
+ for i in "$DVIPDF" dvipdfmx dvipdfm dvipdf dvi2pdf dvitopdf;
+ do
+ if findprog $i; then
+ dvipdf=$i
+ fi
+ done
+ fi
+ # These tools have varying interfaces, some 'input output', others
+ # 'input -o output'. They all seem to accept 'input' only,
+ # outputting using the expected file name.
+ run $dvipdf "$1"
+ if test ! -f `echo "$1" | sed -e 's/\.dvi$/.pdf/'`; then
+ error 1 "cannot find output file"
+ fi
+}
+
+# run_tex_suite ()
+# ----------------
+# Run the TeX tools until a fix point is reached.
+run_tex_suite ()
+{
+ # Move to the working directory.
+ if $tidy; then
+ verbose "cd $work_build"
+ cd "$work_build" || exit 1
+ fi
+
+ # Count the number of cycles.
+ local cycle=0
+
+ while :; do
+ # check for probably LaTeX loop (e.g. varioref)
+ if test $cycle -eq "$max_iters"; then
+ error 0 "Maximum of $max_iters cycles exceeded"
+ break
+ fi
+
+ # report progress
+ cycle=`expr $cycle + 1`
+ verbose "Cycle $cycle for $command_line_filename"
+
+ xref_files_save
+
+ # We run bibtex first, because I can see reasons for the indexes
+ # to change after bibtex is run, but I see no reason for the
+ # converse.
+ run_bibtex
+ run_index
+ run_core_conversion
+
+ xref_files_changed || break
+ done
+
+ # If we were using thumbpdf and producing PDF, then run thumbpdf
+ # and TeX one last time.
+ run_thumbpdf
+
+ # If we are using tex4ht, call it.
+ run_tex4ht
+
+ # Install the result if we didn't already (i.e., if the output is
+ # dvipdf or ps).
+ case $latex2html:$out_lang in
+ *:dvipdf)
+ run_dvipdf "$in_noext.`out_lang_tex`"
+ move_to_dest "$in_noext.`out_lang_ext`"
+ ;;
+ *:ps)
+ : {DVIPS=dvips}
+ $DVIPS -o "$in_noext.`out_lang_ext`" "$in_noext.`out_lang_tex`"
+ move_to_dest "$in_noext.`out_lang_ext`"
+ ;;
+ esac
+
+ cd_orig
+}
+
+## -------------------------------- ##
+## TeX processing auxiliary tools. ##
+## -------------------------------- ##
+
+
+# A sed script that preprocesses Texinfo sources in order to keep the
+# iftex sections only. We want to remove non TeX sections, and comment
+# (with `@c _texi2dvi') TeX sections so that makeinfo does not try to
+# parse them. Nevertheless, while commenting TeX sections, don't
+# comment @macro/@end macro so that makeinfo does propagate them.
+# Unfortunately makeinfo --iftex --no-ifinfo doesn't work well enough
+# (yet), makeinfo can't parse the TeX commands, so work around with sed.
+#
+# We assume that `@c _texi2dvi' starting a line is not present in the
+# document.
+#
+comment_iftex=\
+'/^@tex/,/^@end tex/{
+ s/^/@c _texi2dvi/
+}
+/^@iftex/,/^@end iftex/{
+ s/^/@c _texi2dvi/
+ /^@c _texi2dvi@macro/,/^@c _texi2dvi@end macro/{
+ s/^@c _texi2dvi//
+ }
+}
+/^@ifnottex/,/^@end ifnottex/{
+ s/^/@c (_texi2dvi)/
+}
+/^@ifinfo/,/^@end ifinfo/{
+ /^@node/p
+ /^@menu/,/^@end menu/p
+ t
+ s/^/@c (_texi2dvi)/
+}
+s/^@ifnotinfo/@c _texi2dvi@ifnotinfo/
+s/^@end ifnotinfo/@c _texi2dvi@end ifnotinfo/'
+
+# Uncommenting is simpler: remove any leading `@c texi2dvi'; repeated
+# copies can sneak in via macro invocations.
+uncomment_iftex='s/^@c _texi2dvi\(@c _texi2dvi\)*//'
+
+
+# run_makeinfo ()
+# ---------------
+# Expand macro commands in the original source file using Makeinfo.
+# Always use `end' footnote style, since the `separate' style
+# generates different output (arguably this is a bug in -E). Discard
+# main info output, the user asked to run TeX, not makeinfo.
+run_makeinfo ()
+{
+ test $in_lang = texinfo \
+ || return 0
+
+ # Unless required by the user, makeinfo expansion is wanted only
+ # if texinfo.tex is too old.
+ if $expand; then
+ makeinfo=${MAKEINFO:-makeinfo}
+ else
+ # Check if texinfo.tex performs macro expansion by looking for
+ # its version. The version is a date of the form YEAR-MO-DA.
+ # We don't need to use [0-9] to match the digits since anyway
+ # the comparison with $txiprereq, a number, will fail with non
+ # digits.
+ # Run in a temporary directory to avoid leaving files.
+ version_test_dir=$t2ddir/version_test
+ ensure_dir "$version_test_dir"
+ (
+ cd "$version_test_dir"
+ echo '\input texinfo.tex @bye' >txiversion.tex
+ # Be sure that if tex wants to fail, it is not interactive:
+ # close stdin.
+ $TEX txiversion.tex </dev/null >txiversion.out 2>txiversion.err
+ )
+ if test $? != 0; then
+ cat "$version_test_dir/txiversion.out"
+ cat "$version_test_dir/txiversion.err" >&2
+ error 1 "texinfo.tex appears to be broken, quitting."
+ fi
+ eval `sed -n 's/^.*\[\(.*\)version \(....\)-\(..\)-\(..\).*$/txiformat=\1 txiversion="\2\3\4"/p' "$version_test_dir/txiversion.out"`
+ verbose "texinfo.tex preloaded as \`$txiformat', version is \`$txiversion' ..."
+ if test "$txiprereq" -le "$txiversion" >&6 2>&1; then
+ makeinfo=
+ else
+ makeinfo=${MAKEINFO:-makeinfo}
+ fi
+ # If TeX is preloaded, offer the user this convenience:
+ if test "$txiformat" = Texinfo; then
+ escape=@
+ fi
+ fi
+
+ if test -n "$makeinfo"; then
+ # in_src: the file with macros expanded.
+ # Use the same basename to generate the same aux file names.
+ work_src=$workdir/src
+ ensure_dir "$work_src"
+ in_src=$work_src/$in_base
+ local miincludes
+ miincludes=`list_prefix includes -I`
+ verbose "Macro-expanding $command_line_filename to $in_src ..."
+ # eval $makeinfo because it might be defined as something complex
+ # (running missing) and then we end up with things like '"-I"',
+ # and "-I" (including the quotes) is not an option name. This
+ # happens with gettext 0.14.5, at least.
+ sed "$comment_iftex" "$command_line_filename" \
+ | eval $makeinfo --footnote-style=end -I "$in_dir" $miincludes \
+ -o /dev/null --macro-expand=- \
+ | sed "$uncomment_iftex" >"$in_src"
+ # Continue only if everything succeeded.
+ if test $? -ne 0 \
+ || test ! -r "$in_src"; then
+ verbose "Expansion failed, ignored...";
+ else
+ in_input=$in_src
+ fi
+ fi
+}
+
+# insert_commands ()
+# ------------------
+# Used most commonly for @finalout, @smallbook, etc.
+insert_commands ()
+{
+ if test -n "$textra"; then
+ # _xtr. The file with the user's extra commands.
+ work_xtr=$workdir/xtr
+ in_xtr=$work_xtr/$in_base
+ ensure_dir "$work_xtr"
+ verbose "Inserting extra commands: $textra"
+ local textra_cmd
+ case $in_lang in
+ latex) textra_cmd=1i;;
+ texinfo) textra_cmd='/^@setfilename/a';;
+ *) error 1 "internal error, unknown language: $in_lang";;
+ esac
+ sed "$textra_cmd\\
+$textra" "$in_input" >"$in_xtr"
+ in_input=$in_xtr
+ fi
+
+ case $in_lang:$latex2html:`out_lang_tex` in
+ latex:tex4ht:html)
+ # _tex4ht. The file with the added \usepackage{tex4ht}.
+ work_tex4ht=$workdir/tex4ht
+ in_tex4ht=$work_tex4ht/$in_base
+ ensure_dir "$work_tex4ht"
+ verbose "Inserting \\usepackage{tex4ht}"
+ perl -pe 's<\\documentclass(?:\[.*\])?{.*}>
+ <$&\\usepackage[xhtml]{tex4ht}>' \
+ "$in_input" >"$in_tex4ht"
+ in_input=$in_tex4ht
+ ;;
+ esac
+}
+
+# run_recode ()
+# -------------
+# If this is a Texinfo file with a specified input encoding, and
+# recode is available, then recode to plain 7 bit Texinfo.
+run_recode ()
+{
+ local from
+ local to
+
+ if test $in_lang = texinfo; then
+ pgm='s/^ *@documentencoding *\([^ ][^ ]*\) *$/\1/
+ t found
+ d
+ :found
+ q'
+ encoding=`sed -e "$pgm" "$in_input"`
+ if $recode && test -n "$encoding" && findprog recode; then
+ if test -n "$recode_from"; then
+ from=$recode_from
+ to=$encoding
+ else
+ from=$encoding
+ to=$texinfo
+ fi
+ verbose "Recoding from $from to $to."
+ # _rcd. The Texinfo file recoded in 7bit.
+ work_rcd=$workdir/recode
+ in_rcd=$work_rcd/$in_base
+ ensure_dir "$work_rcd"
+ if recode "$encoding..$to" <"$in_input" >"$in_rcd" \
+ && test -s "$in_rcd"; then
+ in_input=$in_rcd
+ else
+ verbose "Recoding failed, using original input."
+ fi
+ fi
+ fi
+}
+
+# compute_language FILENAME
+# -------------------------
+# Return the short string describing the language in which FILENAME
+# is written: `texinfo' or `latex'.
+compute_language ()
+{
+ # If the user explicitly specified the language, use that.
+ # Otherwise, if the first line is \input texinfo, assume it's texinfo.
+ # Otherwise, guess from the file extension.
+ if test -n "$set_language"; then
+ echo $set_language
+ elif sed 1q "$1" | grep 'input texinfo' >&6; then
+ echo texinfo
+ else
+    # Guess the type of the file (latex or texinfo) from its extension.
+ case $1 in
+ *.ltx | *.tex | *.drv | *.dtx) echo latex;;
+ *) echo texinfo;;
+ esac
+ fi
+}
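For example (hypothetical files, assumed to exist and be readable):

    compute_language manual.texi   # => texinfo
    compute_language article.ltx   # => latex, unless its first line is `\input texinfo'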
+
+
+# run_hevea (MODE)
+# ----------------
+# Convert to HTML/INFO/TEXT.
+#
+# Don't pass `-noiso' to hevea: it's useless in HTML since the charset
+# is set to latin1 anyway, and troublesome in other modes since
+# accented characters lose their accents.
+#
+# Don't pass `-o DEST' to hevea because in that case it leaves all its
+# auxiliary files there too... Too bad, because it means we will need
+# to handle images some day.
+run_hevea ()
+{
+ local hevea="${HEVEA:-hevea}"
+ local run_hevea="$hevea"
+
+ case $1 in
+ html) ;;
+ text|info) run_hevea="$run_hevea -$1";;
+ *) error 1 "run_hevea: invalid argument: $1";;
+ esac
+
+  # Compiling in the tmp directory lets us preserve the output of a
+  # previous successful compilation.
+ run_hevea="$run_hevea -fix -O -o '$out_base'"
+ run_hevea="$run_hevea `list_prefix includes -I` -I '$orig_pwd' "
+ run_hevea="$run_hevea '$in_input'"
+
+ if $debug; then
+ run_hevea="$run_hevea -v -v"
+ fi
+
+ verbose "running $run_hevea"
+ if eval "$run_hevea" >&5; then
+ # hevea leaves trailing white spaces, this is annoying.
+ case $1 in text|info)
+ perl -pi -e 's/[ \t]+$//g' "$out_base"*;;
+ esac
+ case $1 in
+ html|text) move_to_dest "$out_base";;
+ info) # There can be foo.info-1, foo.info-2 etc.
+ move_to_dest "$out_base"*;;
+ esac
+ else
+ error 1 "$hevea exited with bad status, quitting."
+ fi
+}
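For the HTML case, the command assembled above boils down to something like this (paths invented):

    hevea -fix -O -o manual.html -I ./doc -I /path/to/orig_pwd manual.tex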
+
+
+# run_core_conversion ()
+# ----------------------
+# Run TeX (or HeVeA).
+run_core_conversion ()
+{
+ case $in_lang:$latex2html:`out_lang_tex` in
+ *:dvi|*:pdf|latex:tex4ht:html)
+ run_tex;;
+ latex:*:html|latex:*:text|latex:*:info)
+ run_hevea $out_lang;;
+ *)
+ error 1 "invalid input/output combination: $in_lang/$out_lang";;
+ esac
+}
+
+
+# compile ()
+# ----------
+# Run the full compilation chain, from pre-processing to installation
+# of the output at its expected location.
+compile ()
+{
+ # Source file might include additional sources.
+ # We want `.:$orig_pwd' before anything else. (We'll add `.:' later
+ # after all other directories have been turned into absolute paths.)
+ # `.' goes first to ensure that any old .aux, .cps,
+ # etc. files in ${directory} don't get used in preference to fresher
+ # files in `.'. Include orig_pwd in case we are in clean build mode, where
+ # we have cd'd to a temp directory.
+ common="$orig_pwd$path_sep$in_dir$path_sep"
+ #
+ # If we have any includes, put those at the end.
+ # Keep a final path_sep to get the default (system) TeX directories included.
+ txincludes=`list_infix includes $path_sep`
+ test -n "$txincludes" && common="$common$txincludes$path_sep"
+ #
+ for var in $tex_envvars; do
+ eval val="\$common\$${var}_orig"
+ # Convert relative paths to absolute paths, so we can run in another
+ # directory (e.g., in clean build mode, or during the macro-support
+ # detection). ".:" is added here.
+ val=`absolute_filenames "$val"`
+ eval $var="\"$val\""
+ export $var
+ eval verbose \"$var=\'\$${var}\'\"
+ done
+
+ # --expand
+ run_makeinfo
+
+ # --command, --texinfo
+ insert_commands
+
+ # --recode
+ run_recode
+
+  # Run until a fixed point is reached.
+ run_tex_suite
+}
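After the loop above, each TeX-related search path ends up looking roughly like this (directories invented; the trailing empty component keeps the default system directories):

    TEXINPUTS=".:/home/me/build:/home/me/src/doc:/extra/include:"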
+
+
+# remove FILES
+# ------------
+remove ()
+{
+ verbose "Removing" "$@"
+ rm -rf "$@"
+}
+
+
+# mostly_clean
+# ------------
+# Remove auxiliary files and directories. Changes the current directory.
+mostly_clean ()
+{
+ cd_orig
+ set X "$t2ddir"
+ shift
+ $tidy || {
+ local log="$work_build/$in_noext.log"
+ set X ${1+"$@"} "$log" `generated_files_get "$work_build/$in_noext"`
+ shift
+ }
+ remove ${1+"$@"}
+}
+
+
+# cleanup ()
+# ----------
+# Remove what should be removed according to options.
+# Called at the end of each compilation cycle, and at the end of
+# the script. Changes the current directory.
+cleanup ()
+{
+ case $build_mode in
+ local) cd_orig; remove "$t2ddir";;
+ clean) mostly_clean;;
+ tidy) ;;
+ esac
+}
+
+
+
+## ---------------------- ##
+## Command line parsing. ##
+## ---------------------- ##
+
+# Push a sentinel token onto the argument list so that we can tell
+# when we have finished parsing options and arguments.
+# Use "set dummy ...; shift" rather than "set - ..." because on
+# Solaris set - turns off set -x (but keeps set -e).
+# Use ${1+"$@"} rather than "$@" because Digital Unix and Ultrix 4.3
+# still expand "$@" to a single argument (the empty string) rather
+# than nothing at all.
+arg_sep="$$--$$"
+set dummy ${1+"$@"} "$arg_sep"; shift
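In isolation, the sentinel pattern reads like this minimal sketch:

    sep="$$--$$"
    set dummy ${1+"$@"} "$sep"; shift
    while test x"$1" != x"$sep"; do
      echo "argument: $1"
      shift
    done
    shift    # pop the sentinel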
+
+#
+# Parse command line arguments.
+while test x"$1" != x"$arg_sep"; do
+
+ # Handle --option=value by splitting apart and putting back on argv.
+ case "$1" in
+ --*=*)
+ opt=`echo "$1" | sed -e 's/=.*//'`
+ val=`echo "$1" | sed -e 's/[^=]*=//'`
+ shift
+ set dummy "$opt" "$val" ${1+"$@"}; shift
+ ;;
+ esac
+
+ # This recognizes --quark as --quiet. Oh well.
+ case "$1" in
+ -@ ) escape=@;;
+ -~ ) catcode_special=false;;
+ # Silently and without documentation accept -b and --b[atch] as synonyms.
+ -b | --batch) batch=true;;
+ --build) shift; build_mode=$1;;
+ --build-dir) shift; build_dir=$1; build_mode=tidy;;
+ -c | --clean) build_mode=clean;;
+ -D | --debug) debug=true;;
+ -e | -E | --expand) expand=true;;
+ -h | --help) usage;;
+ -I) shift; list_concat_dirs includes "$1";;
+ -l | --lang | --language) shift; set_language=$1;;
+ --mostly-clean) action=mostly-clean;;
+ --no-line-error) line_error=false;;
+ --max-iterations) shift; max_iters=$1;;
+ -o | --out | --output)
+ shift
+ # Make it absolute, just in case we also have --clean, or whatever.
+ oname=`absolute "$1"`;;
+
+ # Output formats.
+ -O|--output-format) shift; out_lang_set "$1";;
+ --dvi|--dvipdf|--html|--info|--pdf|--ps|--text)
+ out_lang_set `echo "x$1" | sed 's/^x--//'`;;
+
+ -p) out_lang_set pdf;;
+ -q | -s | --quiet | --silent) quiet=true; batch=true;;
+ -r | --recode) recode=true;;
+ --recode-from) shift; recode=true; recode_from="$1";;
+ --src-specials) src_specials=--src-specials;;
+ --tex4ht) latex2html=tex4ht;;
+ -t | --texinfo | --command ) shift; textra="$textra\\
+"`echo "$1" | sed 's/\\\\/\\\\\\\\/g'`;;
+ --translate-file ) shift; translate_file="$1";;
+ --tidy) build_mode=tidy;;
+ -v | --vers*) version;;
+ -V | --verb*) verb=true;;
+ --) # What remains are not options.
+ shift
+ while test x"$1" != x"$arg_sep"; do
+ set dummy ${1+"$@"} "$1"; shift
+ shift
+ done
+ break;;
+ -*)
+ error 1 "Unknown or ambiguous option \`$1'." \
+ "Try \`--help' for more information."
+ ;;
+ *) set dummy ${1+"$@"} "$1"; shift;;
+ esac
+ shift
+done
+# Pop the token
+shift
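A stand-alone sketch of the `--option=value' splitting performed at the top of the loop (values invented):

    arg='--output=manual.pdf'
    opt=`echo "$arg" | sed -e 's/=.*//'`     # => --output
    val=`echo "$arg" | sed -e 's/[^=]*=//'`  # => manual.pdf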
+
+# $tidy: compile in a t2d directory.
+# $clean: remove all the aux files.
+case $build_mode in
+ local) clean=false; tidy=false;;
+ tidy) clean=false; tidy=true;;
+ clean) clean=true; tidy=true;;
+ *) error 1 "invalid build mode: $build_mode";;
+esac
+
+# Interpret remaining command line args as filenames.
+case $# in
+ 0)
+ error 2 "Missing file arguments." "Try \`--help' for more information."
+ ;;
+ 1) ;;
+ *)
+ if test -n "$oname"; then
+ error 2 "Can't use option \`--output' with more than one argument."
+ fi
+ ;;
+esac
+
+
+# We can't do much without tex.
+#
+if findprog ${TEX:-tex}; then :; else cat <<EOM
+You don't have a working TeX binary (${TEX:-tex}) installed anywhere in
+your PATH, and texi2dvi cannot proceed without one. If you want to use
+this script, you'll need to install TeX (if you don't have it) or change
+your PATH or TEX environment variable (if you do). See the --help
+output for more details.
+
+For information about obtaining TeX, please see http://www.tug.org. If
+you happen to be using Debian, you can get it with this command:
+ apt-get install tetex-bin
+EOM
+ exit 1
+fi
+
+
+# We want to use etex (or pdftex) if they are available, and the user
+# didn't explicitly specify. We don't check for elatex and pdfelatex
+# because (as of 2003), the LaTeX team has asked that new distributions
+# use etex by default anyway.
+#
+# End up with the TEX and PDFTEX variables set to what we are going to use.
+if test -z "$TEX"; then
+ if findprog etex; then TEX=etex; else TEX=tex; fi
+fi
+#
+if test -z "$PDFTEX"; then
+ if findprog pdfetex; then PDFTEX=pdfetex; else PDFTEX=pdftex; fi
+fi
+
+
+# File descriptor usage:
+# 0 standard input
+# 1 standard output (--verbose messages)
+# 2 standard error
+# 3 some systems may open it to /dev/tty
+# 4 used on the Kubota Titan
+# 5 tools output (turned off by --quiet)
+# 6 tracing/debugging (set -x output, etc.)
+
+
+# Main tools' output (TeX, etc.) that TeX users are used to seeing.
+#
+# If quiet, discard, else redirect to the message flow.
+if $quiet; then
+ exec 5>/dev/null
+else
+ exec 5>&1
+fi
+
+
+# Enable tracing, and auxiliary tools output.
+#
+# Should be used where you'd typically use /dev/null to throw output
+# away.  But sometimes it is convenient to see that output (e.g., from
+# a grep) to aid debugging, especially when debugging remotely via the
+# user.
+if $debug; then
+ exec 6>&1
+ set -x
+else
+ exec 6>/dev/null
+fi
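A minimal sketch of how these two descriptors behave once redirected:

    exec 5>&1 6>/dev/null
    echo "tool output"  >&5    # shown: fd 5 goes to stdout
    echo "trace output" >&6    # discarded unless --debug rewires fd 6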
+
+#
+
+# input_file_name_decode
+# ----------------------
+# Decode COMMAND_LINE_FILENAME, and compute:
+# - COMMAND_LINE_FILENAME clean of TeX commands
+# - IN_DIR
+# The directory containing the input file, possibly absolute if needed.
+# - IN_DIR_ABS
+# The absolute directory of the input file.
+# - IN_BASE
+# The input file base name (no directory part).
+# - IN_NOEXT
+# The input file name without extensions (nor directory part).
+# - IN_INPUT
+# Defaults to COMMAND_LINE_FILENAME, but might change if the
+# input is preprocessed (recode etc.). With directory, possibly absolute.
+input_file_name_decode ()
+{
+  # See if we are run from within AUC-TeX, in which case we are
+ # passed `\input{FOO.tex}' or even `\nonstopmode\input{FOO.tex}'.
+ case $command_line_filename in
+ *\\nonstopmode*)
+ batch=true;;
+ esac
+ case $command_line_filename in
+ *\\input{*}*)
+ # Let AUC-TeX error parser deal with line numbers.
+ line_error=false
+ command_line_filename=`\
+ expr X"$command_line_filename" : X'.*input{\([^}]*\)}'`
+ ;;
+ esac
+
+  # If the COMMAND_LINE_FILENAME is not absolute (e.g., --debug.tex),
+  # prepend `./' to prevent the tools from taking it as an option.
+  echo "$command_line_filename" | $EGREP '^(/|[A-Za-z]:/)' >&6 \
+ || command_line_filename="./$command_line_filename"
+
+ # See if the file exists. If it doesn't we're in trouble since, even
+ # though the user may be able to reenter a valid filename at the tex
+ # prompt (assuming they're attending the terminal), this script won't
+ # be able to find the right xref files and so forth.
+ test -r "$command_line_filename" ||
+ error 1 "cannot read $command_line_filename, skipping."
+
+  # Get the directory containing the input file.
+ in_dir=`func_dirname "$command_line_filename"`
+ in_dir_abs=`absolute "$in_dir"`
+ # In a clean build, we `cd', so get an absolute file name.
+ if $tidy; then
+ in_dir=$in_dir_abs
+ fi
+
+ # Strip directory part but leave extension.
+ in_base=`basename "$command_line_filename"`
+ # Strip extension.
+ in_noext=`noext "$in_base"`
+
+ # The normalized file name to compile. Must always point to the
+ # file to actually compile (in case of recoding, macro-expansion etc.).
+ in_input=$in_dir/$in_base
+
+
+ # Compute the output file name.
+ if test x"$oname" != x; then
+ out_name=$oname
+ else
+ out_name=$in_noext.`out_lang_ext`
+ fi
+ out_dir=`func_dirname "$out_name"`
+ out_dir_abs=`absolute "$out_dir"`
+ out_base=`basename "$out_name"`
+ out_noext=`noext "$out_base"`
+}
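For reference, the `expr' extraction above behaves as follows (file name invented):

    f='\nonstopmode\input{chapter.tex}'
    expr X"$f" : X'.*input{\([^}]*\)}'    # prints `chapter.tex'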
+
+
+## -------------- ##
+## TeXify files. ##
+## -------------- ##
+
+for command_line_filename
+do
+ verbose "Processing $command_line_filename ..."
+
+ input_file_name_decode
+
+ # `texinfo' or `latex'?
+ in_lang=`compute_language "$command_line_filename"`
+
+  # A directory used for all the auxiliary tasks involved in compiling
+  # this document.
+ case $build_dir in
+ '' | . ) t2ddir=$out_noext.t2d ;;
+ *) # Avoid collisions between multiple occurrences of the same
+ # file, so depend on the output path. Remove leading `./',
+ # at least to avoid creating a file starting with `.!', i.e.,
+ # an invisible file. The sed expression is fragile if the cwd
+ # has active characters. Transform / into ! so that we don't
+ # need `mkdir -p'. It might be something to reconsider.
+ t2ddir=$build_dir/`echo "$out_dir_abs/$out_noext.t2d" |
+ sed "s,^$orig_pwd/,,;s,^\./,,;s,/,!,g"`
+ esac
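A quick illustration of the path mangling above (directory names invented):

    orig_pwd=/home/me/build
    echo "/home/me/build/doc/manual.t2d" \
      | sed "s,^$orig_pwd/,,;s,^\./,,;s,/,!,g"    # => doc!manual.t2d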
+ # Remove it at exit if clean mode.
+ trap "cleanup" 0 1 2 15
+
+ ensure_dir "$build_dir" "$t2ddir"
+
+  # We will change directory, so better work with an absolute path...
+ t2ddir=`absolute "$t2ddir"`
+  # Sometimes there are incompatibilities between auxiliary files for
+  # DVI and PDF.  Their contents can also differ depending on whether
+  # we produce PDF or DVI.  So keep a separate space for each.
+ workdir=$t2ddir/`out_lang_tex`
+ ensure_dir "$workdir"
+
+ # _build. In a tidy build, where the auxiliary files are output.
+ if $tidy; then
+ work_build=$workdir/build
+ else
+ work_build=.
+ fi
+
+ # _bak. Copies of the previous auxiliary files (another round is
+ # run if they differ from the new ones).
+ work_bak=$workdir/bak
+
+ # Make those directories.
+ ensure_dir "$work_build" "$work_bak"
+
+ case $action in
+ compile)
+ # Compile the document.
+ compile
+ cleanup
+ ;;
+
+ mostly-clean)
+ mostly_clean
+ ;;
+ esac
+done
+
+verbose "done."
+exit 0 # exit successfully, no matter how we ended the loop.
--
1.5.6.5
* milena/doc/doc.mk,
* milena/doc/examples/make.mk,
* milena/doc/ref_guide/ref_guide.mk,
* milena/doc/technical/technical.mk,
* milena/doc/tutorial/tutorial.mk,
* milena/doc/white_paper/white_paper.mk:
Remove and inline these files...
* doc/Makefile.am: ...here.
Do not include $(top_srcdir)/milena/tests/tests.mk.
(AM_CPPFLAGS): Add -I$(top_srcdir)/milena and
-I$(top_builddir)/milena.
($(DOC_SRCDIR)/examples-outputs.mk): Depend on
$(srcdir)/Makefile.am instead of $(srcdir)/examples/make.mk.
---
milena/ChangeLog | 18 +
milena/doc/Makefile.am | 688 ++++++++++++++++++++++++++++++++-
milena/doc/doc.mk | 42 --
milena/doc/examples/make.mk | 337 ----------------
milena/doc/ref_guide/ref_guide.mk | 104 -----
milena/doc/technical/technical.mk | 100 -----
milena/doc/tutorial/tutorial.mk | 103 -----
milena/doc/white_paper/white_paper.mk | 103 -----
8 files changed, 696 insertions(+), 799 deletions(-)
delete mode 100644 milena/doc/doc.mk
delete mode 100644 milena/doc/examples/make.mk
delete mode 100644 milena/doc/ref_guide/ref_guide.mk
delete mode 100644 milena/doc/technical/technical.mk
delete mode 100644 milena/doc/tutorial/tutorial.mk
delete mode 100644 milena/doc/white_paper/white_paper.mk
diff --git a/milena/ChangeLog b/milena/ChangeLog
index c0bf909..dd2857e 100644
--- a/milena/ChangeLog
+++ b/milena/ChangeLog
@@ -1,5 +1,23 @@
2010-03-12 Roland Levillain <roland(a)lrde.epita.fr>
+ Merge doc/ Makefile helpers into doc/Makefile.am.
+
+ * milena/doc/doc.mk,
+ * milena/doc/examples/make.mk,
+ * milena/doc/ref_guide/ref_guide.mk,
+ * milena/doc/technical/technical.mk,
+ * milena/doc/tutorial/tutorial.mk,
+ * milena/doc/white_paper/white_paper.mk:
+ Remove and inline these files...
+ * doc/Makefile.am: ...here.
+ Do not include $(top_srcdir)/milena/tests/tests.mk.
+ (AM_CPPFLAGS): Add -I$(top_srcdir)/milena and
+ -I$(top_builddir)/milena.
+ ($(DOC_SRCDIR)/examples-outputs.mk): Depend on
+ $(srcdir)/Makefile.am instead of $(srcdir)/examples/make.mk.
+
+2010-03-12 Roland Levillain <roland(a)lrde.epita.fr>
+
Improve dependencies of some documentation products.
* doc/ref_guide/ref_guide.mk (ref_guide_dependencies):
diff --git a/milena/doc/Makefile.am b/milena/doc/Makefile.am
index 0018406..5f2b9ba 100644
--- a/milena/doc/Makefile.am
+++ b/milena/doc/Makefile.am
@@ -1,3 +1,4 @@
+
# Copyright (C) 2007, 2008, 2009, 2010 EPITA Research and Development
# Laboratory (LRDE).
#
@@ -18,10 +19,31 @@
# FIXME: To be overhauled! (See ticket #134).
-# FIXME: Do not use includes if they're used only once: inline them
-# instead.
-
-include $(top_srcdir)/milena/doc/doc.mk
+# Look for Milena headers in srcdir and for (generated) test headers
+# (like data.hh) in builddir.
+AM_CPPFLAGS = \
+ -I$(top_srcdir)/milena -I$(top_builddir)/milena \
+ -I$(top_srcdir)/milena/doc/tutorial/tools
+
+## FIXME: Uppercase variables should be reserved for special names:
+## classical Make variables (e.g. `CXXFLAGS'), Automake variables
+## (e.g. `SUBDIRS'), etc.
+##
+## Moreover, do we really need all these variables? See how things
+## evolve during the refurbishing of the build system in doc/.
+DOC_SRCDIR = $(top_srcdir)/milena/doc
+OUTPUTS_SRCDIR = $(DOC_SRCDIR)/outputs
+SPLIT_OUTPUTS_SRCDIR = $(OUTPUTS_SRCDIR)/split
+FIGURES_SRCDIR = $(DOC_SRCDIR)/figures
+EXAMPLES_SRCDIR = $(DOC_SRCDIR)/examples
+SPLIT_EXAMPLES_SRCDIR = $(EXAMPLES_SRCDIR)/split
+IMG_SRCDIR = $(DOC_SRCDIR)/img
+
+# FIXME: Adjust when $(DOC_SRCDIR) is renamed or removed.
+#
+# FIXME: Careful, `doc_dir' is close to `docdir', which is reserved by
+# Automake. Change this name?
+doc_dir = $(DOC_SRCDIR)
DOXYGEN = doxygen
@@ -64,7 +86,7 @@ doc-devel-html: tutorial-html ref-guide-html white-paper-html \
# FIXME: Check these dependencies.
# 1. They might be redundant.
# 2. It'd be better to depend on actual files rather than timestamps
-# correponding to a bunch of files.
+# corresponding to a bunch of files.
refman_dependencies = \
$(srcdir)/split-examples.stamp \
$(srcdir)/split-outputs.stamp \
@@ -242,28 +264,357 @@ maintainer-clean-local:
## Technical Documentation. ##
## ------------------------- ##
-include $(srcdir)/technical/technical.mk
+.PHONY: technical technical-html technical-pdf
+
+technical_TEXINPUTS = "$(DOC_SRCDIR):$(OUTPUTS_SRCDIR):$(srcdir):\
+$(SPLIT_OUTPUTS_SRCDIR):$(IMG_SRCDIR):$(SPLIT_EXAMPLES_SRCDIR):"
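The trailing `:' in this value is significant: an empty component makes TeX append its default search path. For instance (paths invented), the recipes below amount to something like:

    TEXINPUTS="/src/milena/doc:/src/milena/doc/outputs:" pdflatex technical.tex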
+
+
+technical: technical-html technical-pdf
+
+technical_dir = $(doc_dir)/technical
+
+# FIXME: Check these dependencies. And it'd be better to depend on
+# actual files rather than timestamps corresponding to a bunch of
+# files.
+technical_dependencies = $(doc_dir)/figures.stamp
+
+# FIXME: As in milena/doc/Makefile.am, we should probably strip
+# $(srcdir) prefixes from target variables, e.g. instead of:
+#
+# FOO = $(srcdir)/foo.pdf
+# $(FOO): foo.tex bar.tex
+# dist_doc_DATA = $(FOO)
+#
+# we should use:
+#
+# FOO = foo.pdf
+# $(srcdir)/$(FOO): foo.tex bar.tex
+# dist_doc_DATA = $(FOO)
+#
+# since it minimizes installation issues (see milena/doc/Makefile.am
+# and Vaucanson's doc/Makefile.am).
+
+# FIXME: Distributed products should be generated in the source dir.
+# That's actually the case, *but* the current solution is not clean
+# and might break sometimes. The clean approach is to create a
+# temporary directory, try to generate the documentation there, and
+# move its contents to the source dir in case of success. If the
+# product is a directory, also refresh a timestamp (in the source
+# dir).
+
+# Intermediate product for the various doc targets of the parent
+# directory.
+#
+# This is not a bug: TECHNICAL_HH is meant to have a `.hh'
+# extension, since it is later parsed by Doxygen, which complains
+# about `.html' files.
+TECHNICAL_HH = $(technical_dir)/technical.hh
+technical-html: $(TECHNICAL_HH)
+$(TECHNICAL_HH): $(technical_dir)/technical.tex $(technical_dependencies)
+ $(doc_dir)/tools/todoxygen.sh $< $(technical_dir) $(doc_dir)
+
+
+# Final product.
+TECHNICAL_PDF = $(technical_dir)/technical.pdf
+technical-pdf: $(TECHNICAL_PDF)
+$(TECHNICAL_PDF): $(technical_dir)/technical.tex $(technical_dependencies)
+ TEXINPUTS=$(technical_TEXINPUTS) pdflatex $<
+ TEXINPUTS=$(technical_TEXINPUTS) pdflatex $<
+	TEXINPUTS=$(technical_TEXINPUTS) pdflatex $<
+	test "x$(technical_dir)" = x. \
+	|| mv -f technical.pdf $(technical_dir)
+
+dist_doc_DATA += $(TECHNICAL_PDF)
+
+EXTRA_DIST += \
+ $(technical_dir)/technical.tex \
+ $(TECHNICAL_HH)
+
+# FIXME: Remove `technical.aux technical.toc technical.log technical.bbl
+# technical.out *blg *.lot' when texi2dvi is used.
+#
+# FIXME: Why is $(TECHNICAL_PDF) listed here?  It should be
+# automatically cleaned.  I'm not sure either about the rest of
+# CLEANFILES.
+CLEANFILES += \
+ technical.aux technical.toc technical.log technical.bbl technical.out \
+ *blg *.lot \
+ $(TECHNICAL_PDF) \
+ *.haux *.hh *.html *.htoc \
+ technical.html \
+ technical.idx \
+ $(TECHNICAL_HH)
## ---------- ##
## Tutorial. ##
## ---------- ##
-include $(srcdir)/tutorial/tutorial.mk
+.PHONY: tutorial tutorial-html tutorial-pdf
+
+tutorial_TEXINPUTS = $(DOC_SRCDIR):$(OUTPUTS_SRCDIR):$(IMG_SRCDIR):
+
+
+tutorial: tutorial-html tutorial-pdf
+
+tutorial_dir = $(doc_dir)/tutorial
+
+# FIXME: Check these dependencies.
+# 1. They might be redundant.
+# 2. It'd be better to depend on actual files rather than timestamps
+# corresponding to a bunch of files.
+tutorial_dependencies = \
+ $(doc_dir)/split-examples.stamp \
+ $(doc_dir)/split-outputs.stamp \
+ $(doc_dir)/figures.stamp
+
+# FIXME: As in milena/doc/Makefile.am, we should probably strip
+# $(srcdir) prefixes from target variables, e.g. instead of:
+#
+# FOO = $(srcdir)/foo.pdf
+# $(FOO): foo.tex bar.tex
+# dist_doc_DATA = $(FOO)
+#
+# we should use:
+#
+# FOO = foo.pdf
+# $(srcdir)/$(FOO): foo.tex bar.tex
+# dist_doc_DATA = $(FOO)
+#
+# since it minimizes installation issues (see milena/doc/Makefile.am
+# and Vaucanson's doc/Makefile.am).
+
+# FIXME: Distributed products should be generated in the source dir.
+# That's actually the case, *but* the current solution is not clean
+# and might break sometimes. The clean approach is to create a
+# temporary directory, try to generate the documentation there, and
+# move its contents to the source dir in case of success. If the
+# product is a directory, also refresh a timestamp (in the source
+# dir).
+
+# Intermediate product for the various doc targets of the parent
+# directory.
+#
+# This is not a bug: TUTORIAL_HH is meant to have a `.hh' extension,
+# since it is later parsed by Doxygen, which complains about `.html'
+# files.
+TUTORIAL_HH = $(tutorial_dir)/tutorial.hh
+tutorial-html: $(TUTORIAL_HH)
+$(TUTORIAL_HH): $(tutorial_dir)/tutorial.tex $(tutorial_dependencies)
+ $(doc_dir)/tools/todoxygen.sh $< $(tutorial_dir) $(doc_dir)
+
+# Final product.
+TUTORIAL_PDF = $(tutorial_dir)/tutorial.pdf
+tutorial-pdf: $(TUTORIAL_PDF)
+$(TUTORIAL_PDF): $(tutorial_dir)/tutorial.tex $(tutorial_dependencies)
+ TEXINPUTS=$(tutorial_TEXINPUTS) pdflatex $<
+ TEXINPUTS=$(tutorial_TEXINPUTS) pdflatex $<
+	TEXINPUTS=$(tutorial_TEXINPUTS) pdflatex $<
+	test x"$(tutorial_dir)" = x. \
+	|| mv -f tutorial.pdf $(tutorial_dir)
+
+dist_doc_DATA += $(TUTORIAL_PDF)
+
+EXTRA_DIST += \
+ $(tutorial_dir)/tutorial.tex \
+ $(TUTORIAL_HH)
+
+# FIXME: Remove `tutorial.aux tutorial.toc tutorial.log tutorial.bbl
+# tutorial.out *blg *.lot' when texi2dvi is used.
+#
+# FIXME: Why is $(TUTORIAL_PDF) listed here? It should be
+# automatically cleaned. I'm not sure either about the rest of
+# CLEANFILES.
+CLEANFILES += \
+ tutorial.aux tutorial.toc tutorial.log tutorial.bbl tutorial.out \
+ *blg *.lot \
+ $(TUTORIAL_PDF) \
+ *.haux *.hh *.html *.htoc \
+ tutorial.html \
+ tutorial.idx \
+ $(TUTORIAL_HH)
## ------------- ##
## White Paper. ##
## ------------- ##
-include $(srcdir)/white_paper/white_paper.mk
+.PHONY: white-paper white-paper-html white-paper-pdf
+
+white_paper_dir = $(doc_dir)/white_paper
+
+white_paper_TEXINPUTS = $(DOC_SRCDIR):$(white_paper_dir):
+
+PNGS = \
+ $(white_paper_dir)/figures/house.png \
+ $(white_paper_dir)/figures/house_rag.png \
+ $(white_paper_dir)/figures/house_wshed.png \
+ $(white_paper_dir)/figures/house_wshed_mean_colors.png
+
+EPSS = $(PNGS:png=eps)
+
+# FIXME: we only want to check once whether 'figures' exists!
+.png.eps:
+ test -d figures || mkdir figures
+ convert $< $@
+
+
+white-paper: white-paper-pdf white-paper-html
+
+# FIXME: As in milena/doc/Makefile.am, we should probably strip
+# $(srcdir) prefixes from target variables, e.g. instead of:
+#
+# FOO = $(srcdir)/foo.pdf
+# $(FOO): foo.tex bar.tex
+# dist_doc_DATA = $(FOO)
+#
+# we should use:
+#
+# FOO = foo.pdf
+# $(srcdir)/$(FOO): foo.tex bar.tex
+# dist_doc_DATA = $(FOO)
+#
+# since it minimizes installation issues (see milena/doc/Makefile.am
+# and Vaucanson's doc/Makefile.am).
+
+# FIXME: Distributed products should be generated in the source dir.
+# That's actually the case, *but* the current solution is not clean
+# and might break sometimes. The clean approach is to create a
+# temporary directory, try to generate the documentation there, and
+# move its contents to the source dir in case of success. If the
+# product is a directory, also refresh a timestamp (in the source
+# dir).
+
+# FIXME: Use texi2dvi/TeX4ht instead of plain hevea.
+WHITE_PAPER_HTML = $(white_paper_dir)/white_paper_html/index.html
+white-paper-html: $(WHITE_PAPER_HTML)
+$(WHITE_PAPER_HTML): $(white_paper_dir)/white_paper.tex $(EPSS)
+ test -d $(white_paper_dir)/white_paper_html \
+ || mkdir $(white_paper_dir)/white_paper_html
+ TEXINPUTS=$(white_paper_TEXINPUTS) hevea -O -fix $< -o $@
+ $(doc_dir)/tools/clearbanner.sh $@
+
+
+WHITE_PAPER_PDF = $(white_paper_dir)/white_paper.pdf
+white-paper-pdf: $(WHITE_PAPER_PDF)
+$(WHITE_PAPER_PDF): $(white_paper_dir)/white_paper.tex $(PNGS)
+ TEXINPUTS=$(white_paper_TEXINPUTS) pdflatex $<
+ TEXINPUTS=$(white_paper_TEXINPUTS) pdflatex $<
+	TEXINPUTS=$(white_paper_TEXINPUTS) pdflatex $<
+	test "x$(white_paper_dir)" = x. \
+	|| mv -f white_paper.pdf $(white_paper_dir)
+
+
+dist_doc_DATA += \
+ $(WHITE_PAPER_HTML) \
+ $(WHITE_PAPER_PDF)
+
+EXTRA_DIST += \
+ $(white_paper_dir)/white_paper.tex \
+ $(PNGS) \
+ $(EPSS)
+
+# FIXME: Remove unnecessary patterns.
+CLEANFILES += \
+ white_paper_image.* \
+ white_paper.pdf \
+ *.log *.idx *.out *.aux
+
+# FIXME: Likewise.
+clean-local:
+ -rm -rf white_paper_html figures
## ----------------- ##
## Reference Guide. ##
## ----------------- ##
-include $(srcdir)/ref_guide/ref_guide.mk
+.PHONY: ref-guide ref-guide-html ref-guide-pdf
+
+ref_guide_TEXINPUTS = "$(DOC_SRCDIR):$(OUTPUTS_SRCDIR):\
+$(SPLIT_OUTPUTS_SRCDIR):$(IMG_SRCDIR):$(SPLIT_EXAMPLES_SRCDIR):"
+
+
+ref-guide: ref-guide-html ref-guide-pdf
+
+ref_guide_dir = $(doc_dir)/ref_guide
+
+# FIXME: Check these dependencies.
+# 1. They might be redundant.
+# 2. It'd be better to depend on actual files rather than timestamps
+# corresponding to a bunch of files.
+ref_guide_dependencies = \
+ $(doc_dir)/split-examples.stamp \
+ $(doc_dir)/split-outputs.stamp \
+ $(doc_dir)/figures.stamp
+
+# FIXME: As in milena/doc/Makefile.am, we should probably strip
+# $(srcdir) prefixes from target variables, e.g. instead of:
+#
+# FOO = $(srcdir)/foo.pdf
+# $(FOO): foo.tex bar.tex
+# dist_doc_DATA = $(FOO)
+#
+# we should use:
+#
+# FOO = foo.pdf
+# $(srcdir)/$(FOO): foo.tex bar.tex
+# dist_doc_DATA = $(FOO)
+#
+# since it minimizes installation issues (see milena/doc/Makefile.am
+# and Vaucanson's doc/Makefile.am).
+
+# FIXME: Distributed products should be generated in the source dir.
+# That's actually the case, *but* the current solution is not clean
+# and might break sometimes. The clean approach is to create a
+# temporary directory, try to generate the documentation there, and
+# move its contents to the source dir in case of success. If the
+# product is a directory, also refresh a timestamp (in the source
+# dir).
+
+# Intermediate product for the various doc targets of the parent
+# directory.
+#
+# This is not a bug: REF_GUIDE_HH is meant to have a `.hh' extension,
+# since it is later parsed by Doxygen, which complains about `.html'
+# files.
+REF_GUIDE_HH = $(ref_guide_dir)/ref_guide.hh
+ref-guide-html: $(REF_GUIDE_HH)
+$(REF_GUIDE_HH): $(ref_guide_dir)/ref_guide.tex $(ref_guide_dependencies)
+ $(doc_dir)/tools/todoxygen.sh $< $(ref_guide_dir) $(doc_dir)
+
+
+# Final product.
+REF_GUIDE_PDF = $(ref_guide_dir)/ref_guide.pdf
+ref-guide-pdf: $(REF_GUIDE_PDF)
+$(REF_GUIDE_PDF): $(ref_guide_dir)/ref_guide.tex $(ref_guide_dependencies)
+ TEXINPUTS=$(ref_guide_TEXINPUTS) pdflatex $<
+ TEXINPUTS=$(ref_guide_TEXINPUTS) pdflatex $<
+	TEXINPUTS=$(ref_guide_TEXINPUTS) pdflatex $<
+	test x"$(ref_guide_dir)" = x. \
+	|| mv -f ref_guide.pdf $(ref_guide_dir)
+
+dist_doc_DATA += $(REF_GUIDE_PDF)
+
+EXTRA_DIST += \
+ $(ref_guide_dir)/ref_guide.tex \
+ $(REF_GUIDE_HH)
+
+# FIXME: Remove `ref_guide.aux ref_guide.toc ref_guide.log
+# ref_guide.bbl ref_guide.out *blg *.lot' when texi2dvi is used.
+#
+# FIXME: Why is $(REF_GUIDE_PDF) listed here? It should be
+# automatically cleaned. I'm not sure either about the rest of
+# CLEANFILES.
+CLEANFILES += \
+ ref_guide.aux ref_guide.toc ref_guide.log ref_guide.bbl ref_guide.out \
+ *blg *.lot \
+ $(REF_GUIDE_PDF) \
+ *.haux *.hh *.html *.htoc \
+ ref_guide.html \
+ ref_guide.idx \
+ $(REF_GUIDE_HH)
## --------- ##
@@ -293,7 +644,323 @@ $(srcdir)/figures.stamp: $(FIGURES_SRCDIR)/*.p*m
## Examples. ##
## ---------- ##
-include $(top_srcdir)/milena/doc/examples/make.mk
+PROGRAMS_examples = \
+ accu-right-instanciation \
+ borderthickness \
+ box2d-bbox \
+ domain-display \
+ dpoint-1 \
+ estim-sites \
+ extend \
+ extension-ignore \
+ fill \
+ fill-call-1 \
+ fill-imageif-cfun \
+ fill-part-image \
+ fill-subdomain \
+ fill-subdomain-shorter \
+ first_routine \
+ forall-piter \
+ fun-p2v-1 \
+ graph-data \
+ graph-iter \
+ ima-has \
+ ima-save \
+ ima-size \
+ ima2d-1 \
+ ima2d-2 \
+ ima2d-3 \
+ ima2d-4 \
+ ima2d-5 \
+ ima2d-6-clone \
+ ima2d-7 \
+ ima2d-rot \
+ labeling-compute \
+ logical-not \
+ mln_var \
+ parray-append \
+ parray-bbox \
+ paste \
+ paste-call-1 \
+ point-1 \
+ predicate-1 \
+ win-create-1 \
+ win-create-2
+
+PROGRAMS_examples += \
+ tuto2_first_image \
+ tuto3_rw_image \
+ tuto4_genericity_and_algorithms
+
+# Tuto3
+PROGRAMS_examples += \
+ tuto3_colorize \
+ tuto3_println \
+ tuto3_trace
+
+# Tuto4
+PROGRAMS_examples += \
+ tuto4_point2d \
+ tuto4_site_set_create
+
+EXTRA_PROGRAMS = $(PROGRAMS_examples)
+CLEANFILES += $(PROGRAMS_examples)
+
+# FIXME: Careful, these relative paths only work because this code is
+# now part of `milena/doc/Makefile.am', since `examples/' is a
+# subdirectory of `milena/doc/'.
+accu_right_instanciation_SOURCES = examples/accu-right-instanciation.cc
+borderthickness_SOURCES = examples/borderthickness.cc
+box2d_bbox_SOURCES = examples/box2d-bbox.cc
+domain_display_SOURCES = examples/domain-display.cc
+dpoint_1_SOURCES = examples/dpoint-1.cc
+estim_sites_SOURCES = examples/estim-sites.cc
+extend_SOURCES = examples/extend.cc
+extension_ignore_SOURCES = examples/extension-ignore.cc
+fill_SOURCES = examples/fill.cc
+fill_call_1_SOURCES = examples/fill-call-1.cc
+fill_imageif_cfun_SOURCES = examples/fill-imageif-cfun.cc
+fill_part_image_SOURCES = examples/fill-part-image.cc
+fill_subdomain_SOURCES = examples/fill-subdomain.cc
+fill_subdomain_shorter_SOURCES = examples/fill-subdomain-shorter.cc
+first_routine_SOURCES = examples/tuto3/first_routine.cc
+forall_piter_SOURCES = examples/forall-piter.cc
+fun_p2v_1_SOURCES = examples/fun-p2v-1.cc
+graph_data_SOURCES = examples/graph-data.cc
+graph_iter_SOURCES = examples/graph-iter.cc
+ima2d_1_SOURCES = examples/ima2d-1.cc
+ima2d_2_SOURCES = examples/ima2d-2.cc
+ima2d_3_SOURCES = examples/ima2d-3.cc
+ima2d_4_SOURCES = examples/ima2d-4.cc
+ima2d_5_SOURCES = examples/ima2d-5.cc
+ima2d_6_clone_SOURCES = examples/ima2d-6-clone.cc
+ima2d_7_SOURCES = examples/ima2d-7.cc
+ima2d_rot_SOURCES = examples/ima2d-rot.cc
+ima_has_SOURCES = examples/ima-has.cc
+ima_save_SOURCES = examples/ima-save.cc
+ima_size_SOURCES = examples/ima-size.cc
+labeling_compute_SOURCES = examples/labeling-compute.cc
+logical_not_SOURCES = examples/logical-not.cc
+mln_var_SOURCES = examples/mln_var.cc
+parray_append_SOURCES = examples/parray-append.cc
+parray_bbox_SOURCES = examples/parray-bbox.cc
+paste_SOURCES = examples/paste.cc
+paste_call_1_SOURCES = examples/paste-call-1.cc
+point_1_SOURCES = examples/point-1.cc
+predicate_1_SOURCES = examples/predicate-1.cc
+win_create_1_SOURCES = examples/win-create-1.cc
+win_create_2_SOURCES = examples/win-create-2.cc
+
+tuto2_first_image_SOURCES = examples/tuto2_first_image.cc
+tuto3_rw_image_SOURCES = examples/tuto3_rw_image.cc
+tuto4_genericity_and_algorithms_SOURCES = examples/tuto4_genericity_and_algorithms.cc
+
+# Tuto 3
+tuto3_colorize_SOURCES = examples/tuto3/colorize.cc
+tuto3_println_SOURCES = examples/tuto3/println.cc
+tuto3_trace_SOURCES = examples/tuto3/trace.cc
+
+# Tuto 4
+tuto4_point2d_SOURCES = examples/tuto4/point2d.cc
+tuto4_site_set_create_SOURCES = examples/tuto4/site_set_create.cc
+
+
+# FIXME: Try to avoid the use of an extensive list: either use Make's
+# substitution rules or generate it. Actually this list is already
+# generated (see outputs/outputs.mk).
+OUTPUTS = \
+ $(srcdir)/outputs/accu-right-instanciation.txt \
+ $(srcdir)/outputs/borderthickness.txt \
+ $(srcdir)/outputs/box2d-bbox.txt \
+ $(srcdir)/outputs/domain-display.txt \
+ $(srcdir)/outputs/dpoint-1.txt \
+ $(srcdir)/outputs/estim-sites.txt \
+ $(srcdir)/outputs/extend.txt \
+ $(srcdir)/outputs/extension-ignore.txt \
+ $(srcdir)/outputs/fill-call-1.txt \
+ $(srcdir)/outputs/fill-imageif-cfun.txt \
+ $(srcdir)/outputs/fill-part-image.txt \
+ $(srcdir)/outputs/fill-subdomain-shorter.txt \
+ $(srcdir)/outputs/fill-subdomain.txt \
+ $(srcdir)/outputs/fill.txt \
+ $(srcdir)/outputs/first_routine.txt \
+ $(srcdir)/outputs/forall-piter.txt \
+ $(srcdir)/outputs/fun-p2v-1.txt \
+ $(srcdir)/outputs/graph-data.txt \
+ $(srcdir)/outputs/graph-iter.txt \
+ $(srcdir)/outputs/graph-output-1.txt \
+ $(srcdir)/outputs/ima2d-1.txt \
+ $(srcdir)/outputs/ima2d-2.txt \
+ $(srcdir)/outputs/ima2d-3.txt \
+ $(srcdir)/outputs/ima2d-4.txt \
+ $(srcdir)/outputs/ima2d-5.txt \
+ $(srcdir)/outputs/ima2d-6-clone.txt \
+ $(srcdir)/outputs/ima2d-7.txt \
+ $(srcdir)/outputs/ima2d-decl-2.txt \
+ $(srcdir)/outputs/ima2d-display-1.txt \
+ $(srcdir)/outputs/ima2d-display-2.txt \
+ $(srcdir)/outputs/ima2d-display-output-1.txt \
+ $(srcdir)/outputs/ima2d-display-output-2.txt \
+ $(srcdir)/outputs/ima2d-rot.txt \
+ $(srcdir)/outputs/ima-has.txt \
+ $(srcdir)/outputs/ima-save.txt \
+ $(srcdir)/outputs/ima-size.txt \
+ $(srcdir)/outputs/labeling-compute.txt \
+ $(srcdir)/outputs/logical-not.txt \
+ $(srcdir)/outputs/mln_var.txt \
+ $(srcdir)/outputs/parray-append.txt \
+ $(srcdir)/outputs/parray-bbox.txt \
+ $(srcdir)/outputs/parray-display-1.txt \
+ $(srcdir)/outputs/paste-call-1.txt \
+ $(srcdir)/outputs/paste.txt \
+ $(srcdir)/outputs/point-1.txt \
+ $(srcdir)/outputs/predicate-1.txt \
+ $(srcdir)/outputs/tuto2_first_image.txt \
+ $(srcdir)/outputs/tuto3_colorize.txt \
+ $(srcdir)/outputs/tuto3_println.txt \
+ $(srcdir)/outputs/tuto3_rw_image.txt \
+ $(srcdir)/outputs/tuto3_trace.txt \
+ $(srcdir)/outputs/tuto4_genericity_and_algorithms.txt \
+ $(srcdir)/outputs/tuto4_point2d.txt \
+ $(srcdir)/outputs/tuto4_site_set_create.txt \
+ $(srcdir)/outputs/win-create-1-display.txt \
+ $(srcdir)/outputs/win-create-1.txt \
+ $(srcdir)/outputs/win-create-2.txt
+
+$(DOC_SRCDIR)/examples-outputs.mk: $(DOC_SRCDIR)/gen-examples-outputs-mk $(srcdir)/Makefile.am
+ $(DOC_SRCDIR)/gen-examples-outputs-mk $(PROGRAMS_examples) >$@.tmp
+ mv -f $@.tmp $@
+ chmod -w $@
+include $(DOC_SRCDIR)/examples-outputs.mk
+
+MAINTAINERCLEANFILES += $(OUTPUTS)
+
+# FIXME: Warning: $(SPLIT_EXAMPLES_SRCDIR) might not exist. Ensure it
+# exists.
+
+# FIXME: Try to avoid the use of an extensive list: either use Make's
+# substitution rules or generate it.
+EXAMPLES = \
+ $(srcdir)/examples/ima-save.cc \
+ $(srcdir)/examples/accu-right-instanciation.cc \
+ $(srcdir)/examples/borderthickness.cc \
+ $(srcdir)/examples/box2d-bbox.cc \
+ $(srcdir)/examples/domain-display.cc \
+ $(srcdir)/examples/dpoint-1.cc \
+ $(srcdir)/examples/estim-sites.cc \
+ $(srcdir)/examples/extend.cc \
+ $(srcdir)/examples/extension-ignore.cc \
+ $(srcdir)/examples/fill-call-1.cc \
+ $(srcdir)/examples/fill-imageif-cfun.cc \
+ $(srcdir)/examples/fill-part-image.cc \
+ $(srcdir)/examples/fill-subdomain-shorter.cc \
+ $(srcdir)/examples/fill-subdomain.cc \
+ $(srcdir)/examples/fill.cc \
+ $(srcdir)/examples/forall-piter.cc \
+ $(srcdir)/examples/fun-p2v-1.cc \
+ $(srcdir)/examples/graph-data.cc \
+ $(srcdir)/examples/graph-iter.cc \
+ $(srcdir)/examples/ima-has.cc \
+ $(srcdir)/examples/ima-load.cc \
+ $(srcdir)/examples/ima-size.cc \
+ $(srcdir)/examples/ima2d-1.cc \
+ $(srcdir)/examples/ima2d-2.cc \
+ $(srcdir)/examples/ima2d-3.cc \
+ $(srcdir)/examples/ima2d-4.cc \
+ $(srcdir)/examples/ima2d-5.cc \
+ $(srcdir)/examples/ima2d-6-clone.cc \
+ $(srcdir)/examples/ima2d-7.cc \
+ $(srcdir)/examples/ima2d-rot.cc \
+ $(srcdir)/examples/labeling-compute.cc \
+ $(srcdir)/examples/logical-not.cc \
+ $(srcdir)/examples/mln_var.cc \
+ $(srcdir)/examples/parray-append.cc \
+ $(srcdir)/examples/parray-bbox.cc \
+ $(srcdir)/examples/paste-call-1.cc \
+ $(srcdir)/examples/paste.cc \
+ $(srcdir)/examples/point-1.cc \
+ $(srcdir)/examples/predicate-1.cc \
+ $(srcdir)/examples/trash/accu.cc \
+ $(srcdir)/examples/trash/box.cc \
+ $(srcdir)/examples/trash/cpp_issue.cc \
+ $(srcdir)/examples/trash/erosion.cc \
+ $(srcdir)/examples/trash/for_Z.cc \
+ $(srcdir)/examples/trash/graph.cc \
+ $(srcdir)/examples/trash/image2d.cc \
+ $(srcdir)/examples/trash/image_flat.cc \
+ $(srcdir)/examples/trash/image_if.cc \
+ $(srcdir)/examples/trash/image_plain.cc \
+ $(srcdir)/examples/trash/image_safe.cc \
+ $(srcdir)/examples/trash/labeling_algo.cc \
+ $(srcdir)/examples/trash/mk_graph.cc \
+ $(srcdir)/examples/trash/p_array.2.cc \
+ $(srcdir)/examples/trash/p_array.cc \
+ $(srcdir)/examples/trash/p_image.cc \
+ $(srcdir)/examples/trash/p_key.2.cc \
+ $(srcdir)/examples/trash/p_key.cc \
+ $(srcdir)/examples/trash/p_line2d.cc \
+ $(srcdir)/examples/trash/p_mutable_array_of.cc \
+ $(srcdir)/examples/trash/p_queue.cc \
+ $(srcdir)/examples/trash/p_run.cc \
+ $(srcdir)/examples/trash/p_vaccess.cc \
+ $(srcdir)/examples/trash/proxy.cc \
+ $(srcdir)/examples/trash/sub_image.cc \
+ $(srcdir)/examples/trash/sub_image_if.cc \
+ $(srcdir)/examples/trash/tuto_bis.cc \
+ $(srcdir)/examples/trash/tuto_one.cc \
+ $(srcdir)/examples/trash/vec.cc \
+ $(srcdir)/examples/trash/win_multiple.cc \
+ $(srcdir)/examples/trash/window.cc \
+ $(srcdir)/examples/tuto2_first_image.cc \
+ $(srcdir)/examples/tuto3/colorize.cc \
+ $(srcdir)/examples/tuto3/first_routine.cc \
+ $(srcdir)/examples/tuto3/println.cc \
+ $(srcdir)/examples/tuto3/trace.cc \
+ $(srcdir)/examples/tuto3_rw_image.cc \
+ $(srcdir)/examples/tuto4/image.cc \
+ $(srcdir)/examples/tuto4/point2d.cc \
+ $(srcdir)/examples/tuto4/site_set_create.cc \
+ $(srcdir)/examples/tuto4_genericity_and_algorithms.cc \
+ $(srcdir)/examples/win-create-1.cc \
+ $(srcdir)/examples/win-create-2.cc
+
+# FIXME: Place split files in their own directory, e.g.: split
+# `examples/foo.cc' into `examples/split/foo.dir/foo-1.cc',
+# `examples/split/foo.dir/foo-2.cc', etc. It makes removal of these
+# files easier.  We probably also need a stamp file to keep track of
+# dependencies.
+
+# FIXME: Loops are bad, as they prevent parallelism. And we prefer
+# data-driven actions anyway. This would imply that we know in
+# advance the list of outputs (here, split examples).  Maybe we can
+# infer it from the LaTeX document.
+$(srcdir)/split-examples.stamp: $(EXAMPLES)
+ @rm -f $@.tmp
+ @touch $@.tmp
+ failcom='exit 1'; \
+ for file in $(EXAMPLES); do \
+ $(DOC_SRCDIR)/tools/split_sample.sh \
+ $$file cc raw $(SPLIT_EXAMPLES_SRCDIR) \
+ || eval $$failcom; \
+ done
+ @mv -f $@.tmp $@
+
+# FIXME: Loops are bad, as they prevent parallelism. And we prefer
+# data-driven actions anyway. This would imply that we know in
+# advance the list of outputs (here, split outputs).  Maybe we can
+# infer it from the LaTeX document.
+$(srcdir)/split-outputs.stamp: $(OUTPUTS)
+ @rm -f $@.tmp
+ @touch $@.tmp
+ failcom='exit 1'; \
+ for file in $(OUTPUTS); do \
+ $(DOC_SRCDIR)/tools/split_sample.sh \
+ $$file txt "" $(SPLIT_OUTPUTS_SRCDIR) \
+ || eval $$failcom; \
+ done
+ @mv -f $@.tmp $@
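In plain shell, the stamp idiom used by the two rules above boils down to this sketch (tool path and file list invented):

    rm -f split-outputs.stamp.tmp
    touch split-outputs.stamp.tmp
    for file in outputs/fill.txt outputs/paste.txt; do
      tools/split_sample.sh "$file" txt "" outputs/split || exit 1
    done
    mv -f split-outputs.stamp.tmp split-outputs.stamp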
+
# Regen Make helpers.
EXTRA_DIST += generate_dist_files.sh headers.stamp
@@ -308,6 +975,7 @@ regen-am:
./generate_dist_files.sh outputs txt && \
./generate_dist_files.sh examples cc.raw
@mv -f $(srcdir)/headers.stamp.tmp $(srcdir)/headers.stamp
+## FIXME: Regen examples-outputs.mk as well?
$(srcdir)/examples/examples.mk: $(srcdir)/headers.stamp
$(srcdir)/outputs/outputs.mk: $(srcdir)/headers.stamp
diff --git a/milena/doc/doc.mk b/milena/doc/doc.mk
deleted file mode 100644
index 161d893..0000000
--- a/milena/doc/doc.mk
+++ /dev/null
@@ -1,42 +0,0 @@
-# -*- Automake -*-
-
-# Copyright (C) 2008, 2009, 2010 EPITA Research and Development Laboratory
-# (LRDE).
-#
-# This file is part of Olena.
-#
-# Olena is free software: you can redistribute it and/or modify it under
-# the terms of the GNU General Public License as published by the Free
-# Software Foundation, version 2 of the License.
-#
-# Olena is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-# General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with Olena. If not, see <http://www.gnu.org/licenses/>.
-
-include $(top_srcdir)/milena/tests/tests.mk
-
-AM_CPPFLAGS += -I$(top_srcdir)/milena/doc/tutorial/tools
-
-## FIXME: Upercase variables should be reserved for special names:
-## classical Make variables (e.g. `CXXFLAGS') Automake variables
-## (e.g. `SUBDIRS'), etc.
-##
-## Moreover, do we really need all these variables? See how things
-## evolve during the refurbishing of the build system in doc/.
-DOC_SRCDIR = $(top_srcdir)/milena/doc
-OUTPUTS_SRCDIR = $(DOC_SRCDIR)/outputs
-SPLIT_OUTPUTS_SRCDIR = $(OUTPUTS_SRCDIR)/split
-FIGURES_SRCDIR = $(DOC_SRCDIR)/figures
-EXAMPLES_SRCDIR = $(DOC_SRCDIR)/examples
-SPLIT_EXAMPLES_SRCDIR = $(EXAMPLES_SRCDIR)/split
-IMG_SRCDIR = $(DOC_SRCDIR)/img
-
-# FIXME: Adjust when $(DOC_SRCDIR) is renamed or removed.
-#
-# FIXME: Careful, `doc_dir' is close to `docdir', which is reserved by
-# Automake. Change this name?
-doc_dir = $(DOC_SRCDIR)
diff --git a/milena/doc/examples/make.mk b/milena/doc/examples/make.mk
deleted file mode 100644
index 91f21cb..0000000
--- a/milena/doc/examples/make.mk
+++ /dev/null
@@ -1,337 +0,0 @@
-# -*- Automake -*-
-
-# Copyright (C) 2009, 2010 EPITA Research and Development Laboratory (LRDE).
-#
-# This file is part of Olena.
-#
-# Olena is free software: you can redistribute it and/or modify it under
-# the terms of the GNU General Public License as published by the Free
-# Software Foundation, version 2 of the License.
-#
-# Olena is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-# General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with Olena. If not, see <http://www.gnu.org/licenses/>.
-
-PROGRAMS_examples = \
- accu-right-instanciation \
- borderthickness \
- box2d-bbox \
- domain-display \
- dpoint-1 \
- estim-sites \
- extend \
- extension-ignore \
- fill \
- fill-call-1 \
- fill-imageif-cfun \
- fill-part-image \
- fill-subdomain \
- fill-subdomain-shorter \
- first_routine \
- forall-piter \
- fun-p2v-1 \
- graph-data \
- graph-iter \
- ima-has \
- ima-save \
- ima-size \
- ima2d-1 \
- ima2d-2 \
- ima2d-3 \
- ima2d-4 \
- ima2d-5 \
- ima2d-6-clone \
- ima2d-7 \
- ima2d-rot \
- labeling-compute \
- logical-not \
- mln_var \
- parray-append \
- parray-bbox \
- paste \
- paste-call-1 \
- point-1 \
- predicate-1 \
- win-create-1 \
- win-create-2
-
-PROGRAMS_examples += \
- tuto2_first_image \
- tuto3_rw_image \
- tuto4_genericity_and_algorithms
-
-# Tuto3
-PROGRAMS_examples += \
- tuto3_colorize \
- tuto3_println \
- tuto3_trace
-
-# Tuto4
-PROGRAMS_examples += \
- tuto4_point2d \
- tuto4_site_set_create
-
-EXTRA_PROGRAMS = $(PROGRAMS_examples)
-CLEANFILES += $(PROGRAMS_examples)
-
-# FIXME: Careful, this will only work if this file is included by
-# `milena/doc/Makefile.am', since `examples/' is a subdirectory of
-# `milena/doc/'. Anyway, this file is to be eventually inlined in
-# `milena/doc/Makefile.am', so this is not a big deal.
-accu_right_instanciation_SOURCES = examples/accu-right-instanciation.cc
-borderthickness_SOURCES = examples/borderthickness.cc
-box2d_bbox_SOURCES = examples/box2d-bbox.cc
-domain_display_SOURCES = examples/domain-display.cc
-dpoint_1_SOURCES = examples/dpoint-1.cc
-estim_sites_SOURCES = examples/estim-sites.cc
-extend_SOURCES = examples/extend.cc
-extension_ignore_SOURCES = examples/extension-ignore.cc
-fill_SOURCES = examples/fill.cc
-fill_call_1_SOURCES = examples/fill-call-1.cc
-fill_imageif_cfun_SOURCES = examples/fill-imageif-cfun.cc
-fill_part_image_SOURCES = examples/fill-part-image.cc
-fill_subdomain_SOURCES = examples/fill-subdomain.cc
-fill_subdomain_shorter_SOURCES = examples/fill-subdomain-shorter.cc
-first_routine_SOURCES = examples/tuto3/first_routine.cc
-forall_piter_SOURCES = examples/forall-piter.cc
-fun_p2v_1_SOURCES = examples/fun-p2v-1.cc
-graph_data_SOURCES = examples/graph-data.cc
-graph_iter_SOURCES = examples/graph-iter.cc
-ima2d_1_SOURCES = examples/ima2d-1.cc
-ima2d_2_SOURCES = examples/ima2d-2.cc
-ima2d_3_SOURCES = examples/ima2d-3.cc
-ima2d_4_SOURCES = examples/ima2d-4.cc
-ima2d_5_SOURCES = examples/ima2d-5.cc
-ima2d_6_clone_SOURCES = examples/ima2d-6-clone.cc
-ima2d_7_SOURCES = examples/ima2d-7.cc
-ima2d_rot_SOURCES = examples/ima2d-rot.cc
-ima_has_SOURCES = examples/ima-has.cc
-ima_save_SOURCES = examples/ima-save.cc
-ima_size_SOURCES = examples/ima-size.cc
-labeling_compute_SOURCES = examples/labeling-compute.cc
-logical_not_SOURCES = examples/logical-not.cc
-mln_var_SOURCES = examples/mln_var.cc
-parray_append_SOURCES = examples/parray-append.cc
-parray_bbox_SOURCES = examples/parray-bbox.cc
-paste_SOURCES = examples/paste.cc
-paste_call_1_SOURCES = examples/paste-call-1.cc
-point_1_SOURCES = examples/point-1.cc
-predicate_1_SOURCES = examples/predicate-1.cc
-win_create_1_SOURCES = examples/win-create-1.cc
-win_create_2_SOURCES = examples/win-create-2.cc
-
-tuto2_first_image_SOURCES = examples/tuto2_first_image.cc
-tuto3_rw_image_SOURCES = examples/tuto3_rw_image.cc
-tuto4_genericity_and_algorithms_SOURCES = examples/tuto4_genericity_and_algorithms.cc
-
-# Tuto 3
-tuto3_colorize_SOURCES = examples/tuto3/colorize.cc
-tuto3_println_SOURCES = examples/tuto3/println.cc
-tuto3_trace_SOURCES = examples/tuto3/trace.cc
-
-# Tuto 4
-tuto4_point2d_SOURCES = examples/tuto4/point2d.cc
-tuto4_site_set_create_SOURCES = examples/tuto4/site_set_create.cc
-
-
-# FIXME: Try to avoid the use an extensive list: either use Make's
-# substitution rules or generate it. Actually this list is already
-# generated (see outputs/outputs.mk).
-OUTPUTS = \
- $(srcdir)/outputs/accu-right-instanciation.txt \
- $(srcdir)/outputs/borderthickness.txt \
- $(srcdir)/outputs/box2d-bbox.txt \
- $(srcdir)/outputs/domain-display.txt \
- $(srcdir)/outputs/dpoint-1.txt \
- $(srcdir)/outputs/estim-sites.txt \
- $(srcdir)/outputs/extend.txt \
- $(srcdir)/outputs/extension-ignore.txt \
- $(srcdir)/outputs/fill-call-1.txt \
- $(srcdir)/outputs/fill-imageif-cfun.txt \
- $(srcdir)/outputs/fill-part-image.txt \
- $(srcdir)/outputs/fill-subdomain-shorter.txt \
- $(srcdir)/outputs/fill-subdomain.txt \
- $(srcdir)/outputs/fill.txt \
- $(srcdir)/outputs/first_routine.txt \
- $(srcdir)/outputs/forall-piter.txt \
- $(srcdir)/outputs/fun-p2v-1.txt \
- $(srcdir)/outputs/graph-data.txt \
- $(srcdir)/outputs/graph-iter.txt \
- $(srcdir)/outputs/graph-output-1.txt \
- $(srcdir)/outputs/ima2d-1.txt \
- $(srcdir)/outputs/ima2d-2.txt \
- $(srcdir)/outputs/ima2d-3.txt \
- $(srcdir)/outputs/ima2d-4.txt \
- $(srcdir)/outputs/ima2d-5.txt \
- $(srcdir)/outputs/ima2d-6-clone.txt \
- $(srcdir)/outputs/ima2d-7.txt \
- $(srcdir)/outputs/ima2d-decl-2.txt \
- $(srcdir)/outputs/ima2d-display-1.txt \
- $(srcdir)/outputs/ima2d-display-2.txt \
- $(srcdir)/outputs/ima2d-display-output-1.txt \
- $(srcdir)/outputs/ima2d-display-output-2.txt \
- $(srcdir)/outputs/ima2d-rot.txt \
- $(srcdir)/outputs/ima-has.txt \
- $(srcdir)/outputs/ima-save.txt \
- $(srcdir)/outputs/ima-size.txt \
- $(srcdir)/outputs/labeling-compute.txt \
- $(srcdir)/outputs/logical-not.txt \
- $(srcdir)/outputs/mln_var.txt \
- $(srcdir)/outputs/parray-append.txt \
- $(srcdir)/outputs/parray-bbox.txt \
- $(srcdir)/outputs/parray-display-1.txt \
- $(srcdir)/outputs/paste-call-1.txt \
- $(srcdir)/outputs/paste.txt \
- $(srcdir)/outputs/point-1.txt \
- $(srcdir)/outputs/predicate-1.txt \
- $(srcdir)/outputs/tuto2_first_image.txt \
- $(srcdir)/outputs/tuto3_colorize.txt \
- $(srcdir)/outputs/tuto3_println.txt \
- $(srcdir)/outputs/tuto3_rw_image.txt \
- $(srcdir)/outputs/tuto3_trace.txt \
- $(srcdir)/outputs/tuto4_genericity_and_algorithms.txt \
- $(srcdir)/outputs/tuto4_point2d.txt \
- $(srcdir)/outputs/tuto4_site_set_create.txt \
- $(srcdir)/outputs/win-create-1-display.txt \
- $(srcdir)/outputs/win-create-1.txt \
- $(srcdir)/outputs/win-create-2.txt
-
-## FIXME: Do not forget to turn the dependency
-## $(srcdir)/examples/make.mk into $(srcdir)/Makefile.am when the
-## former is merged into the latter.
-$(DOC_SRCDIR)/examples-outputs.mk: $(DOC_SRCDIR)/gen-examples-outputs-mk $(srcdir)/examples/make.mk
- $(DOC_SRCDIR)/gen-examples-outputs-mk $(PROGRAMS_examples) >$@.tmp
- mv -f $@.tmp $@
- chmod -w $@
-include $(DOC_SRCDIR)/examples-outputs.mk
-
-MAINTAINERCLEANFILES += $(OUTPUTS)
-
-# FIXME: Warning: $(SPLIT_EXAMPLES_SRCDIR) might not exist. Ensure it
-# exists.
-
-# FIXME: Try to avoid the use an extensive list: either use Make's
-# substitution rules or generate it.
-EXAMPLES = \
- $(srcdir)/examples/ima-save.cc \
- $(srcdir)/examples/accu-right-instanciation.cc \
- $(srcdir)/examples/borderthickness.cc \
- $(srcdir)/examples/box2d-bbox.cc \
- $(srcdir)/examples/domain-display.cc \
- $(srcdir)/examples/dpoint-1.cc \
- $(srcdir)/examples/estim-sites.cc \
- $(srcdir)/examples/extend.cc \
- $(srcdir)/examples/extension-ignore.cc \
- $(srcdir)/examples/fill-call-1.cc \
- $(srcdir)/examples/fill-imageif-cfun.cc \
- $(srcdir)/examples/fill-part-image.cc \
- $(srcdir)/examples/fill-subdomain-shorter.cc \
- $(srcdir)/examples/fill-subdomain.cc \
- $(srcdir)/examples/fill.cc \
- $(srcdir)/examples/forall-piter.cc \
- $(srcdir)/examples/fun-p2v-1.cc \
- $(srcdir)/examples/graph-data.cc \
- $(srcdir)/examples/graph-iter.cc \
- $(srcdir)/examples/ima-has.cc \
- $(srcdir)/examples/ima-load.cc \
- $(srcdir)/examples/ima-size.cc \
- $(srcdir)/examples/ima2d-1.cc \
- $(srcdir)/examples/ima2d-2.cc \
- $(srcdir)/examples/ima2d-3.cc \
- $(srcdir)/examples/ima2d-4.cc \
- $(srcdir)/examples/ima2d-5.cc \
- $(srcdir)/examples/ima2d-6-clone.cc \
- $(srcdir)/examples/ima2d-7.cc \
- $(srcdir)/examples/ima2d-rot.cc \
- $(srcdir)/examples/labeling-compute.cc \
- $(srcdir)/examples/logical-not.cc \
- $(srcdir)/examples/mln_var.cc \
- $(srcdir)/examples/parray-append.cc \
- $(srcdir)/examples/parray-bbox.cc \
- $(srcdir)/examples/paste-call-1.cc \
- $(srcdir)/examples/paste.cc \
- $(srcdir)/examples/point-1.cc \
- $(srcdir)/examples/predicate-1.cc \
- $(srcdir)/examples/trash/accu.cc \
- $(srcdir)/examples/trash/box.cc \
- $(srcdir)/examples/trash/cpp_issue.cc \
- $(srcdir)/examples/trash/erosion.cc \
- $(srcdir)/examples/trash/for_Z.cc \
- $(srcdir)/examples/trash/graph.cc \
- $(srcdir)/examples/trash/image2d.cc \
- $(srcdir)/examples/trash/image_flat.cc \
- $(srcdir)/examples/trash/image_if.cc \
- $(srcdir)/examples/trash/image_plain.cc \
- $(srcdir)/examples/trash/image_safe.cc \
- $(srcdir)/examples/trash/labeling_algo.cc \
- $(srcdir)/examples/trash/mk_graph.cc \
- $(srcdir)/examples/trash/p_array.2.cc \
- $(srcdir)/examples/trash/p_array.cc \
- $(srcdir)/examples/trash/p_image.cc \
- $(srcdir)/examples/trash/p_key.2.cc \
- $(srcdir)/examples/trash/p_key.cc \
- $(srcdir)/examples/trash/p_line2d.cc \
- $(srcdir)/examples/trash/p_mutable_array_of.cc \
- $(srcdir)/examples/trash/p_queue.cc \
- $(srcdir)/examples/trash/p_run.cc \
- $(srcdir)/examples/trash/p_vaccess.cc \
- $(srcdir)/examples/trash/proxy.cc \
- $(srcdir)/examples/trash/sub_image.cc \
- $(srcdir)/examples/trash/sub_image_if.cc \
- $(srcdir)/examples/trash/tuto_bis.cc \
- $(srcdir)/examples/trash/tuto_one.cc \
- $(srcdir)/examples/trash/vec.cc \
- $(srcdir)/examples/trash/win_multiple.cc \
- $(srcdir)/examples/trash/window.cc \
- $(srcdir)/examples/tuto2_first_image.cc \
- $(srcdir)/examples/tuto3/colorize.cc \
- $(srcdir)/examples/tuto3/first_routine.cc \
- $(srcdir)/examples/tuto3/println.cc \
- $(srcdir)/examples/tuto3/trace.cc \
- $(srcdir)/examples/tuto3_rw_image.cc \
- $(srcdir)/examples/tuto4/image.cc \
- $(srcdir)/examples/tuto4/point2d.cc \
- $(srcdir)/examples/tuto4/site_set_create.cc \
- $(srcdir)/examples/tuto4_genericity_and_algorithms.cc \
- $(srcdir)/examples/win-create-1.cc \
- $(srcdir)/examples/win-create-2.cc
-
-# FIXME: Place split files in their own directory, e.g.: split
-# `examples/foo.cc' into `examples/split/foo.dir/foo-1.cc',
-# `examples/split/foo.dir/foo-2.cc', etc. It makes removal of these
-# files easier. We probably also nee a stamp file to keep track of
-# dependencies.
-
-# FIXME: Loops are bad, as they prevent parallelism. And we prefer
-# data-driven actions anyway. This would imply that we know in
-# advance the list out outputs (here, split examples). Maybe we can
-# infer it from the LaTeX document.
-$(srcdir)/split-examples.stamp: $(EXAMPLES)
- @rm -f $@.tmp
- @touch $@.tmp
- failcom='exit 1'; \
- for file in $(EXAMPLES); do \
- $(DOC_SRCDIR)/tools/split_sample.sh \
- $$file cc raw $(SPLIT_EXAMPLES_SRCDIR) \
- || eval $$failcom; \
- done
- @mv -f $@.tmp $@
-
-# FIXME: Loops are bad, as they prevent parallelism. And we prefer
-# data-driven actions anyway. This would imply that we know in
-# advance the list out outputs (here, split outputs). Maybe we can
-# infer it from the LaTeX document.
-$(srcdir)/split-outputs.stamp: $(OUTPUTS)
- @rm -f $@.tmp
- @touch $@.tmp
- failcom='exit 1'; \
- for file in $(OUTPUTS); do \
- $(DOC_SRCDIR)/tools/split_sample.sh \
- $$file txt "" $(SPLIT_OUTPUTS_SRCDIR) \
- || eval $$failcom; \
- done
- @mv -f $@.tmp $@
diff --git a/milena/doc/ref_guide/ref_guide.mk b/milena/doc/ref_guide/ref_guide.mk
deleted file mode 100644
index d776b2f..0000000
--- a/milena/doc/ref_guide/ref_guide.mk
+++ /dev/null
@@ -1,104 +0,0 @@
-# -*- Automake -*-
-
-# Copyright (C) 2009, 2010 EPITA Research and Development Laboratory (LRDE).
-#
-# This file is part of Olena.
-#
-# Olena is free software: you can redistribute it and/or modify it under
-# the terms of the GNU General Public License as published by the Free
-# Software Foundation, version 2 of the License.
-#
-# Olena is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-# General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with Olena. If not, see <http://www.gnu.org/licenses/>.
-#
-
-.PHONY: ref-guide ref-guide-html ref-guide-pdf
-
-ref_guide_TEXINPUTS = "$(DOC_SRCDIR):$(OUTPUTS_SRCDIR):\
-$(SPLIT_OUTPUTS_SRCDIR):$(IMG_SRCDIR):$(SPLIT_EXAMPLES_SRCDIR):"
-
-
-ref-guide: ref-guide-html ref-guide-pdf
-
-ref_guide_dir = $(doc_dir)/ref_guide
-
-# FIXME: Check these dependencies.
-# 1. They might be redundant.
-# 2. It'd be better to depend on actual files rather than timestamps
-# correponding to a bunch of files.
-ref_guide_dependencies = \
- $(doc_dir)/split-examples.stamp \
- $(doc_dir)/split-outputs.stamp \
- $(doc_dir)/figures.stamp
-
-# FIXME: As in milena/doc/Makefile.am, we should probably strip
-# $(srcdir) prefixes from target variables, e.g. instead of:
-#
-# FOO = $(srcdir)/foo.pdf
-# $(FOO): foo.tex bar.tex
-# dist_doc_DATA = $(FOO)
-#
-# we should use:
-#
-# FOO = foo.pdf
-# $(srcdir)/$(FOO): foo.tex bar.tex
-# dist_doc_DATA = $(FOO)
-#
-# since it minimizes installation issues (see milena/doc/Makefile.am
-# and Vaucanson's doc/Makefile.am).
-
-# FIXME: Distributed products should be generated in the source dir.
-# That's actually the case, *but* the current solution is not clean
-# and might break sometimes. The clean approach is to create a
-# temporary directory, try to generate the documentation there, and
-# move its contents to the source dir in case of success. If the
-# product is a directory, also refresh a timestamp (in the source
-# dir).  (See the sketch below.)
-
-# Intermediate product for the various doc targets of the parent
-# directory.
-#
-# This is not a bug: REF_GUIDE_HH is meant to have a `.hh' extension,
-# since it is later parsed by Doxygen, which complains about `.html'
-# files.
-REF_GUIDE_HH = $(ref_guide_dir)/ref_guide.hh
-ref-guide-html: $(REF_GUIDE_HH)
-$(REF_GUIDE_HH): $(ref_guide_dir)/ref_guide.tex $(ref_guide_dependencies)
- $(doc_dir)/tools/todoxygen.sh $< $(ref_guide_dir) $(doc_dir)
-
-
-# Final product.
-REF_GUIDE_PDF = $(ref_guide_dir)/ref_guide.pdf
-ref-guide-pdf: $(REF_GUIDE_PDF)
-$(REF_GUIDE_PDF): $(ref_guide_dir)/ref_guide.tex $(ref_guide_dependencies)
- TEXINPUTS=$(ref_guide_TEXINPUTS) pdflatex $<
- TEXINPUTS=$(ref_guide_TEXINPUTS) pdflatex $<
-	TEXINPUTS=$(ref_guide_TEXINPUTS) pdflatex $<
- test x"$(ref_guide_dir)" != x. \
- && mv -f ref_guide.pdf $(ref_guide_dir)
-
-dist_doc_DATA += $(REF_GUIDE_PDF)
-
-EXTRA_DIST += \
- $(ref_guide_dir)/ref_guide.tex \
- $(REF_GUIDE_HH)
-
-# FIXME: Remove `ref_guide.aux ref_guide.toc ref_guide.log
-# ref_guide.bbl ref_guide.out *blg *.lot' when texi2dvi is used.
-#
-# FIXME: Why is $(REF_GUIDE_PDF) listed here? It should be
-# automatically cleaned. I'm not sure either about the rest of
-# CLEANFILES.
-CLEANFILES += \
- ref_guide.aux ref_guide.toc ref_guide.log ref_guide.bbl ref_guide.out \
- *blg *.lot \
- $(REF_GUIDE_PDF) \
- *.haux *.hh *.html *.htoc \
- ref_guide.html \
- ref_guide.idx \
- $(REF_GUIDE_HH)
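The recurring FIXME about generating distributed products in the source directory sketches a cleaner scheme: build in a scratch directory and move the result only on success.  Roughly, assuming pdflatex's `-output-directory' option is available and using the purely illustrative directory name `ref_guide.tmp':

# Sketch of the temporary-directory approach for the PDF target.
$(REF_GUIDE_PDF): $(ref_guide_dir)/ref_guide.tex $(ref_guide_dependencies)
	rm -rf ref_guide.tmp && mkdir ref_guide.tmp
	TEXINPUTS=$(ref_guide_TEXINPUTS) pdflatex -output-directory ref_guide.tmp $<
	TEXINPUTS=$(ref_guide_TEXINPUTS) pdflatex -output-directory ref_guide.tmp $<
	mv -f ref_guide.tmp/ref_guide.pdf $(ref_guide_dir) && rm -rf ref_guide.tmp

On failure nothing is moved, so the source directory keeps its previous ref_guide.pdf, and the leftover scratch directory can simply be cleaned.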
diff --git a/milena/doc/technical/technical.mk b/milena/doc/technical/technical.mk
deleted file mode 100644
index 8c5ccea..0000000
--- a/milena/doc/technical/technical.mk
+++ /dev/null
@@ -1,100 +0,0 @@
-# -*- Automake -*-
-
-# Copyright (C) 2009, 2010 EPITA Research and Development Laboratory (LRDE).
-#
-# This file is part of Olena.
-#
-# Olena is free software: you can redistribute it and/or modify it under
-# the terms of the GNU General Public License as published by the Free
-# Software Foundation, version 2 of the License.
-#
-# Olena is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-# General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with Olena. If not, see <http://www.gnu.org/licenses/>.
-#
-
-.PHONY: technical technical-html technical-pdf
-
-technical_TEXINPUTS = "$(DOC_SRCDIR):$(OUTPUTS_SRCDIR):$(srcdir):\
-$(SPLIT_OUTPUTS_SRCDIR):$(IMG_SRCDIR):$(SPLIT_EXAMPLES_SRCDIR):"
-
-
-technical: technical-html technical-pdf
-
-technical_dir = $(doc_dir)/technical
-
-# FIXME: Check these dependencies. And it'd be better to depend on
-# actual files rather than timestamps corresponding to a bunch of
-# files.
-technical_dependencies = $(doc_dir)/figures.stamp
-
-# FIXME: As in milena/doc/Makefile.am, we should probably strip
-# $(srcdir) prefixes from target variables, e.g. instead of:
-#
-# FOO = $(srcdir)/foo.pdf
-# $(FOO): foo.tex bar.tex
-# dist_doc_DATA = $(FOO)
-#
-# we should use:
-#
-# FOO = foo.pdf
-# $(srcdir)/$(FOO): foo.tex bar.tex
-# dist_doc_DATA = $(FOO)
-#
-# since it minimizes installation issues (see milena/doc/Makefile.am
-# and Vaucanson's doc/Makefile.am).
-
-# FIXME: Distributed products should be generated in the source dir.
-# That's actually the case, *but* the current solution is not clean
-# and might break sometimes. The clean approach is to create a
-# temporary directory, try to generate the documentation there, and
-# move its contents to the source dir in case of success. If the
-# product is a directory, also refresh a timestamp (in the source
-# dir).
-
-# Intermediate product for the various doc targets of the parent
-# directory.
-#
-# This is not a bug: TECHNICAL_HH is meant to have a `.hh'
-# extension, since it is later parsed by Doxygen, which complains
-# about `.html' files.
-TECHNICAL_HH = $(technical_dir)/technical.hh
-technical-html: $(TECHNICAL_HH)
-$(TECHNICAL_HH): $(technical_dir)/technical.tex $(technical_dependencies)
- $(doc_dir)/tools/todoxygen.sh $< $(technical_dir) $(doc_dir)
-
-
-# Final product.
-TECHNICAL_PDF = $(technical_dir)/technical.pdf
-technical-pdf: $(TECHNICAL_PDF)
-$(TECHNICAL_PDF): $(technical_dir)/technical.tex $(technical_dependencies)
- TEXINPUTS=$(technical_TEXINPUTS) pdflatex $<
- TEXINPUTS=$(technical_TEXINPUTS) pdflatex $<
-	TEXINPUTS=$(technical_TEXINPUTS) pdflatex $<
- test "x$(technical_dir)" != x. \
- && mv -f technical.pdf $(technical_dir)
-
-dist_doc_DATA += $(TECHNICAL_PDF)
-
-EXTRA_DIST += \
- $(technical_dir)/technical.tex \
- $(TECHNICAL_HH)
-
-# FIXME: Remove `technical.aux technical.toc technical.log technical.bbl
-# technical.out *blg *.lot' when texi2dvi is used.
-#
-# FIXME: Why is $(TECHNICAL_PDF) listed here?  It should be
-# automatically cleaned. I'm not sure either about the rest of
-# CLEANFILES.
-CLEANFILES += \
- technical.aux technical.toc technical.log technical.bbl technical.out \
- *blg *.lot \
- $(TECHNICAL_PDF) \
- *.haux *.hh *.html *.htoc \
- technical.html \
- technical.idx \
- $(TECHNICAL_HH)
diff --git a/milena/doc/tutorial/tutorial.mk b/milena/doc/tutorial/tutorial.mk
deleted file mode 100644
index 7f65365..0000000
--- a/milena/doc/tutorial/tutorial.mk
+++ /dev/null
@@ -1,103 +0,0 @@
-# -*- Automake -*-
-
-# Copyright (C) 2008, 2009, 2010 EPITA Research and Development
-# Laboratory (LRDE).
-#
-# This file is part of Olena.
-#
-# Olena is free software: you can redistribute it and/or modify it under
-# the terms of the GNU General Public License as published by the Free
-# Software Foundation, version 2 of the License.
-#
-# Olena is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-# General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with Olena. If not, see <http://www.gnu.org/licenses/>.
-#
-
-.PHONY: tutorial tutorial-html tutorial-pdf
-
-tutorial_TEXINPUTS = $(DOC_SRCDIR):$(OUTPUTS_SRCDIR):$(IMG_SRCDIR):
-
-
-tutorial: tutorial-html tutorial-pdf
-
-tutorial_dir = $(doc_dir)/tutorial
-
-# FIXME: Check these dependencies.
-# 1. They might be redundant.
-# 2. It'd be better to depend on actual files rather than timestamps
-#    corresponding to a bunch of files.
-tutorial_dependencies = \
- $(doc_dir)/split-examples.stamp \
- $(doc_dir)/split-outputs.stamp \
- $(doc_dir)/figures.stamp
-
-# FIXME: As in milena/doc/Makefile.am, we should probably strip
-# $(srcdir) prefixes from target variables, e.g. instead of:
-#
-# FOO = $(srcdir)/foo.pdf
-# $(FOO): foo.tex bar.tex
-# dist_doc_DATA = $(FOO)
-#
-# we should use:
-#
-# FOO = foo.pdf
-# $(srcdir)/$(FOO): foo.tex bar.tex
-# dist_doc_DATA = $(FOO)
-#
-# since it minimizes installation issues (see milena/doc/Makefile.am
-# and Vaucanson's doc/Makefile.am).
-
-# FIXME: Distributed products should be generated in the source dir.
-# That's actually the case, *but* the current solution is not clean
-# and might break sometimes. The clean approach is to create a
-# temporary directory, try to generate the documentation there, and
-# move its contents to the source dir in case of success. If the
-# product is a directory, also refresh a timestamp (in the source
-# dir).
-
-# Intermediate product for the various doc targets of the parent
-# directory.
-#
-# This is not a bug: TUTORIAL_HH is meant to have a `.hh' extension,
-# since it is later parsed by Doxygen, which complains about `.html'
-# files.
-TUTORIAL_HH = $(tutorial_dir)/tutorial.hh
-tutorial-html: $(TUTORIAL_HH)
-$(TUTORIAL_HH): $(tutorial_dir)/tutorial.tex $(tutorial_dependencies)
- $(doc_dir)/tools/todoxygen.sh $< $(tutorial_dir) $(doc_dir)
-
-# Final product.
-TUTORIAL_PDF = $(tutorial_dir)/tutorial.pdf
-tutorial-pdf: $(TUTORIAL_PDF)
-$(TUTORIAL_PDF): $(tutorial_dir)/tutorial.tex $(tutorial_dependencies)
- TEXINPUTS=$(tutorial_TEXINPUTS) pdflatex $<
- TEXINPUTS=$(tutorial_TEXINPUTS) pdflatex $<
-	TEXINPUTS=$(tutorial_TEXINPUTS) pdflatex $<
- test x"$(tutorial_dir)" != x. \
- && mv -f tutorial.pdf $(tutorial_dir)
-
-dist_doc_DATA += $(TUTORIAL_PDF)
-
-EXTRA_DIST += \
- $(tutorial_dir)/tutorial.tex \
- $(TUTORIAL_HH)
-
-# FIXME: Remove `tutorial.aux tutorial.toc tutorial.log tutorial.bbl
-# tutorial.out *blg *.lot' when texi2dvi is used.
-#
-# FIXME: Why is $(TUTORIAL_PDF) listed here? It should be
-# automatically cleaned. I'm not sure either about the rest of
-# CLEANFILES.
-CLEANFILES += \
- tutorial.aux tutorial.toc tutorial.log tutorial.bbl tutorial.out \
- *blg *.lot \
- $(TUTORIAL_PDF) \
- *.haux *.hh *.html *.htoc \
- tutorial.html \
- tutorial.idx \
- $(TUTORIAL_HH)
diff --git a/milena/doc/white_paper/white_paper.mk b/milena/doc/white_paper/white_paper.mk
deleted file mode 100644
index 0b75c44..0000000
--- a/milena/doc/white_paper/white_paper.mk
+++ /dev/null
@@ -1,103 +0,0 @@
-# -*- Automake -*-
-
-# Copyright (C) 2009, 2010 EPITA Research and Development Laboratory (LRDE).
-#
-# This file is part of Olena.
-#
-# Olena is free software: you can redistribute it and/or modify it under
-# the terms of the GNU General Public License as published by the Free
-# Software Foundation, version 2 of the License.
-#
-# Olena is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-# General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with Olena. If not, see <http://www.gnu.org/licenses/>.
-#
-
-.PHONY: white-paper white-paper-html white-paper-pdf
-
-white_paper_dir = $(doc_dir)/white_paper
-
-white_paper_TEXINPUTS = $(DOC_SRCDIR):$(white_paper_dir):
-
-PNGS = \
- $(white_paper_dir)/figures/house.png \
- $(white_paper_dir)/figures/house_rag.png \
- $(white_paper_dir)/figures/house_wshed.png \
- $(white_paper_dir)/figures/house_wshed_mean_colors.png
-
-EPSS = $(PNGS:png=eps)
-
-# FIXME: we want to check if 'figures' exists only once!  See sketch below.
-.png.eps:
- test -d figures || mkdir figures
- convert $< $@
-
-
-white-paper: white-paper-pdf white-paper-html
-
-# FIXME: As in milena/doc/Makefile.am, we should probably strip
-# $(srcdir) prefixes from target variables, e.g. instead of:
-#
-# FOO = $(srcdir)/foo.pdf
-# $(FOO): foo.tex bar.tex
-# dist_doc_DATA = $(FOO)
-#
-# we should use:
-#
-# FOO = foo.pdf
-# $(srcdir)/$(FOO): foo.tex bar.tex
-# dist_doc_DATA = $(FOO)
-#
-# since it minimizes installation issues (see milena/doc/Makefile.am
-# and Vaucanson's doc/Makefile.am).
-
-# FIXME: Distributed products should be generated in the source dir.
-# That's actually the case, *but* the current solution is not clean
-# and might break sometimes. The clean approach is to create a
-# temporary directory, try to generate the documentation there, and
-# move its contents to the source dir in case of success. If the
-# product is a directory, also refresh a timestamp (in the source
-# dir).
-
-# FIXME: Use texi2dvi/TeX4ht instead of plain hevea.
-WHITE_PAPER_HTML = $(white_paper_dir)/white_paper_html/index.html
-white-paper-html: $(WHITE_PAPER_HTML)
-$(WHITE_PAPER_HTML): $(white_paper_dir)/white_paper.tex $(EPSS)
- test -d $(white_paper_dir)/white_paper_html \
- || mkdir $(white_paper_dir)/white_paper_html
- TEXINPUTS=$(white_paper_TEXINPUTS) hevea -O -fix $< -o $@
- $(doc_dir)/tools/clearbanner.sh $@
-
-
-WHITE_PAPER_PDF = $(white_paper_dir)/white_paper.pdf
-white-paper-pdf: $(WHITE_PAPER_PDF)
-$(WHITE_PAPER_PDF): $(white_paper_dir)/white_paper.tex $(PNGS)
- TEXINPUTS=$(white_paper_TEXINPUTS) pdflatex $<
- TEXINPUTS=$(white_paper_TEXINPUTS) pdflatex $<
-	TEXINPUTS=$(white_paper_TEXINPUTS) pdflatex $<
- test "x$(white_paper_dir)" != x. \
- && mv -f white_paper.pdf $(white_paper_dir)
-
-
-dist_doc_DATA += \
- $(WHITE_PAPER_HTML) \
- $(WHITE_PAPER_PDF)
-
-EXTRA_DIST += \
- $(white_paper_dir)/white_paper.tex \
- $(PNGS) \
- $(EPSS)
-
-# FIXME: Remove unnecessary patterns.
-CLEANFILES += \
- white_paper_image.* \
- white_paper.pdf \
- *.log *.idx *.out *.aux
-
-# FIXME: Likewise.
-clean-local:
- -rm -rf white_paper_html figures
--
1.5.6.5
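The `.png.eps' rule in white_paper.mk checks for the `figures' directory at every conversion; the FIXME asks to do it only once.  With GNU make, an order-only prerequisite expresses this directly.  A sketch of the idea (suffix rules cannot take extra prerequisites, hence the equivalent pattern rule; none of this is the rule actually used):

# Create the output directory once; each conversion only requires that
# it exists (order-only prerequisite, after the `|').
figures:
	mkdir -p figures

%.eps: %.png | figures
	convert $< $@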