#!/bin/sh -x

# Generate HTML documentation from GCC Texinfo docs.
# This version is for GCC 3.1 and later versions.

# Run this from /tmp.
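#
# Option summary (derived from the parsing loop below):
#   -r RELEASE   CVS tag to export and document; defaults to HEAD
#   -d SUBDIR    subdirectory of onlinedocs to install into
# A release may only be given together with a subdirectory; for example
# (tag and directory names are illustrative only):
#   -r gcc_3_4_0_release -d gcc-3.4.0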

CVSROOT=/cvs/gcc
export CVSROOT

PATH=/usr/local/bin:$PATH

WWWBASE=/www/gcc/htdocs
WWWBASE_PREFORMATTED=/www/gcc/htdocs-preformatted
WWWPREPROCESS='/www/gcc/bin/preprocess -r'

# Process options -rrelease and -ddirectory
RELEASE=""
SUBDIR=""

while [ $# -gt 0 ]; do
  case $1 in
    -r*)
      if [ -n "$RELEASE" ]; then
        echo "Multiple releases specified" >&2
        exit 1
      fi
      RELEASE="${1#-r}"
      if [ -z "$RELEASE" ]; then
        shift
        RELEASE="$1"
        if [ -z "$RELEASE" ]; then
          echo "No release specified with -r" >&2
          exit 1
        fi
      fi
      ;;
    -d*)
      if [ -n "$SUBDIR" ]; then
        echo "Multiple subdirectories specified" >&2
        exit 1
      fi
      SUBDIR="${1#-d}"
      if [ -z "$SUBDIR" ]; then
        shift
        SUBDIR="$1"
        if [ -z "$SUBDIR" ]; then
          echo "No subdirectory specified with -d" >&2
          exit 1
        fi
      fi
      ;;
    *)
      echo "Unknown argument \"$1\"" >&2
      exit 1
      ;;
  esac
  shift
done

if [ -n "$RELEASE" ] && [ -z "$SUBDIR" ]; then
  echo "Release specified without subdirectory" >&2
  exit 1
fi

if [ -z "$SUBDIR" ]; then
  DOCSDIR=$WWWBASE/onlinedocs
else
  DOCSDIR=$WWWBASE/onlinedocs/$SUBDIR
fi

if [ ! -d $DOCSDIR ]; then
  mkdir $DOCSDIR
fi

if [ -z "$RELEASE" ]; then
  RELEASE=HEAD
fi

WORKDIR=/tmp/gcc-doc-update.$$

/bin/rm -rf $WORKDIR
/bin/mkdir $WORKDIR
cd $WORKDIR

# Find all the texi files in the repository, except those in directories
# we do not care about (texinfo, etc).
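# The sed invocation strips the leading $CVSROOT/ prefix, the RCS ,v
# suffix, and any Attic/ path component, leaving repository-relative
# names that can be fed to cvs export below.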
find $CVSROOT/gcc -name \*.texi,v -print | fgrep -v -f/home/gccadmin/scripts/doc_exclude | sed -e s#$CVSROOT/##g -e s#,v##g -e s#Attic/##g > FILES

# Checkout all the texi files.
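# In addition to the texi files listed in FILES, export install.texi2html,
# texinfo.tex, and the Ada xgnatugn source and ug_words list used below.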
cvs -Q export -r$RELEASE `cat FILES` gcc/gcc/doc/install.texi2html gcc/gcc/doc/include/texinfo.tex gcc/gcc/ada/xgnatugn.adb gcc/gcc/ada/ug_words

# Build a tarball of the sources.
tar cf docs-sources.tar gcc

# The directory to pass to -I; this is the one with texinfo.tex
# and fdl.texi.
includedir=gcc/gcc/doc/include

MANUALS="cpp cppinternals fastjar gcc gccint gcj g77 gfortran gnat_ug_unx gnat_ug_vms gnat_ug_vxw gnat_ug_wnt gnat_ugn_unw gnat-style gnat_rm libiberty porting"

# Generate gnat_ugn_unw
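# gnatmake builds the xgnatugn tool from the exported Ada sources; xgnatugn
# then produces gnat_ugn_unw.texi from gnat_ugn.texi and the ug_words list
# for the "unw" variant.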
if [ -f gcc/gcc/ada/xgnatugn.adb ]; then
  gnatmake -q gcc/gcc/ada/xgnatugn
  ./xgnatugn unw gcc/gcc/ada/gnat_ugn.texi \
    gcc/gcc/ada/ug_words gnat_ugn_unw.texi
fi

# Now convert the relevant files from texi to HTML and PostScript.
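# For each manual whose .texi source was exported: makeinfo --html writes
# split HTML output into a ${file}/ subdirectory, which is then tarred up;
# texi2dvi and dvips produce the PostScript version; and the manual's
# target directory under $DOCSDIR is created if it does not yet exist.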
for file in $MANUALS; do
  filename=`find . -name ${file}.texi`
  if [ "${filename}" ]; then
    makeinfo --html -I ${includedir} -I `dirname ${filename}` ${filename}
    tar cf ${file}-html.tar ${file}/*.html
    texi2dvi -I ${includedir} ${filename} </dev/null && dvips -o ${file}.ps ${file}.dvi
    mkdir -p $DOCSDIR/$file
  fi
done

# Then build a gzipped copy of each of the resulting .html, .ps and .tar files
for file in */*.html *.ps *.tar; do
  cat $file | gzip --best > $file.gz
done

# On the 15th of the month, wipe all the old files from the
# web server.
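# Everything at the top level of $DOCSDIR except index.html is removed,
# along with each manual's generated HTML; the copy loop below then
# repopulates the directory with freshly built files.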
today=`date +%d`
if test $today = 15; then
  find $DOCSDIR -type f -maxdepth 1 -print | grep -v index.html | xargs rm
  for m in $MANUALS; do
    rm $DOCSDIR/$m/*.html $DOCSDIR/$m/*.html.gz
  done
fi

# And copy the resulting files to the web server
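# Each file is compared against the copy already on the server with the
# volatile lines stripped (the makeinfo <meta name=generator> tag and the
# %DVIPSSource: stamp), so a file is only re-copied when its real content
# has changed.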
for file in */*.html *.ps *.tar; do
  cat $DOCSDIR/$file |
    sed -e '/^<meta name=generator/d' \
        -e '/^%DVIPSSource:/d' > file1
  cat $file |
    sed -e '/^<meta name=generator/d' \
        -e '/^%DVIPSSource:/d' > file2
  if cmp -s file1 file2; then
    :
  else
    cp $file $DOCSDIR/$file
    cp $file.gz $DOCSDIR/$file.gz
  fi
done

cd $DOCSDIR

# Finally, generate the installation documentation (but only for CVS HEAD).
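# install.texi2html takes its input and output locations from the
# SOURCEDIR and DESTDIR environment variables exported below.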
if [ "$RELEASE" = "HEAD" ]; then
  SOURCEDIR=$WORKDIR/gcc/gcc/doc
  DESTDIR=$WWWBASE_PREFORMATTED/install
  export SOURCEDIR
  export DESTDIR
  $WORKDIR/gcc/gcc/doc/install.texi2html

  # Preprocess the entire web site, not just the install docs!
  echo "Invoking $WWWPREPROCESS"
  $WWWPREPROCESS | grep -v '^ Warning: Keeping'
fi

# Clean up behind us.
rm -rf $WORKDIR