update_web_docs_svn: Remove unnecessary files from source tree.
* update_web_docs_svn: Remove unnecessary files from source tree.
Use set -e.
* doc_exclude, update_web_docs, update_web_docs_libstdcxx: Remove.

From-SVN: r108698
parent 0157856478
commit f0ac2193cb
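A minimal sketch (not part of this commit) of what "Use set -e" buys the script: once set, the first failing command aborts the whole run, so a broken export or makeinfo step can no longer let stale documentation reach the web server.

    #!/bin/sh
    set -e
    # If the export fails, the script stops here instead of continuing with
    # whatever partial output is left in the work directory.
    svn -q export "$SVNROOT/trunk" gcc
    makeinfo --html gcc/gcc/doc/cpp.texi   # never reached after a failed export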
ChangeLog
@@ -1,3 +1,9 @@
2005-12-16  Joseph S. Myers  <joseph@codesourcery.com>

	* update_web_docs_svn: Remove unnecessary files from source tree.
	Use set -e.
	* doc_exclude, update_web_docs, update_web_docs_libstdcxx: Remove.

2005-12-16  Mark Mitchell  <mark@codesourcery.com>

	* update_web_docs_svn: Set @srcdir in gcc-vers.texi.
doc_exclude (deleted)
@@ -1,2 +0,0 @@
texinfo
etc
update_web_docs (deleted)
@@ -1,180 +0,0 @@
#!/bin/sh -x

# Generate HTML documentation from GCC Texinfo docs.
# This version is for GCC 3.1 and later versions.

# Run this from /tmp.
CVSROOT=/cvs/gcc
export CVSROOT

PATH=/usr/local/bin:$PATH

WWWBASE=/www/gcc/htdocs
WWWBASE_PREFORMATTED=/www/gcc/htdocs-preformatted
WWWPREPROCESS='/www/gcc/bin/preprocess -r'

# Process options -rrelease and -ddirectory
RELEASE=""
SUBDIR=""

while [ $# -gt 0 ]; do
  case $1 in
    -r*)
      if [ -n "$RELEASE" ]; then
        echo "Multiple releases specified" >&2
        exit 1
      fi
      RELEASE="${1#-r}"
      if [ -z "$RELEASE" ]; then
        shift
        RELEASE="$1"
        if [ -z "$RELEASE" ]; then
          echo "No release specified with -r" >&2
          exit 1
        fi
      fi
      ;;
    -d*)
      if [ -n "$SUBDIR" ]; then
        echo "Multiple subdirectories specified" >&2
        exit 1
      fi
      SUBDIR="${1#-d}"
      if [ -z "$SUBDIR" ]; then
        shift
        SUBDIR="$1"
        if [ -z "$SUBDIR" ]; then
          echo "No subdirectory specified with -d" >&2
          exit 1
        fi
      fi
      ;;
    *)
      echo "Unknown argument \"$1\"" >&2
      exit 1
      ;;
  esac
  shift
done

if [ -n "$RELEASE" ] && [ -z "$SUBDIR" ]; then
  echo "Release specified without subdirectory" >&2
  exit 1
fi

if [ -z "$SUBDIR" ]; then
  DOCSDIR=$WWWBASE/onlinedocs
else
  DOCSDIR=$WWWBASE/onlinedocs/$SUBDIR
fi

if [ ! -d $DOCSDIR ]; then
  mkdir $DOCSDIR
fi

if [ -z "$RELEASE" ]; then
  RELEASE=HEAD
fi

WORKDIR=/tmp/gcc-doc-update.$$

/bin/rm -rf $WORKDIR
/bin/mkdir $WORKDIR
cd $WORKDIR

# Find all the texi files in the repository, except those in directories
# we do not care about (texinfo, etc).
find $CVSROOT/gcc -name \*.texi,v -print | fgrep -v -f/home/gccadmin/scripts/doc_exclude | sed -e s#$CVSROOT/##g -e s#,v##g -e s#Attic/##g > FILES

# Checkout all the texi files.
cvs -Q export -r$RELEASE `cat FILES` gcc/gcc/doc/install.texi2html gcc/gcc/doc/include/texinfo.tex gcc/gcc/ada/xgnatugn.adb gcc/gcc/ada/ug_words gcc/gcc/BASE-VER gcc/gcc/DEV-PHASE

# Build a tarball of the sources.
tar cf docs-sources.tar gcc

# The directory to pass to -I; this is the one with texinfo.tex
# and fdl.texi.
includedir=gcc/gcc/doc/include

MANUALS="cpp cppinternals fastjar gcc gccint gcj g77 gfortran gnat_ug_unx gnat_ug_vms gnat_ug_vxw gnat_ug_wnt gnat_ugn_unw gnat-style gnat_rm libiberty porting"

# Generate gnat_ugn_unw

if [ -f gcc/gcc/ada/xgnatugn.adb ]; then
  gnatmake -q gcc/gcc/ada/xgnatugn
  ./xgnatugn unw gcc/gcc/ada/gnat_ugn.texi \
    gcc/gcc/ada/ug_words gnat_ugn_unw.texi
fi

# Generate gcc-vers.texi.
(
  echo "@set version-GCC $(cat gcc/gcc/BASE-VER)"
  if [ "$(cat gcc/gcc/DEV-PHASE)" = "experimental" ]; then
    echo "@set DEVELOPMENT"
  else
    echo "@clear DEVELOPMENT"
  fi
) > $includedir/gcc-vers.texi

# Now convert the relevant files from texi to HTML, PDF and PostScript.
for file in $MANUALS; do
  filename=`find . -name ${file}.texi`
  if [ "${filename}" ]; then
    makeinfo --html -I ${includedir} -I `dirname ${filename}` ${filename}
    tar cf ${file}-html.tar ${file}/*.html
    texi2dvi -I ${includedir} ${filename} </dev/null && dvips -o ${file}.ps ${file}.dvi
    texi2pdf -I ${includedir} ${filename} </dev/null
    mkdir -p $DOCSDIR/$file
  fi
done

# Then build a gzipped copy of each of the resulting .html, .ps and .tar files
for file in */*.html *.ps *.pdf *.tar; do
  cat $file | gzip --best > $file.gz
done

# On the 15th of the month, wipe all the old files from the
# web server.
today=`date +%d`
if test $today = 15; then
  find $DOCSDIR -type f -maxdepth 1 -print | grep -v index.html | xargs rm
  for m in $MANUALS; do
    rm $DOCSDIR/$m/*.html $DOCSDIR/$m/*.html.gz
  done
fi

# And copy the resulting files to the web server
for file in */*.html *.ps *.pdf *.tar; do
  cat $DOCSDIR/$file |
    sed -e '/^<meta name=generator/d' \
        -e '/^%DVIPSSource:/d' > file1
  cat $file |
    sed -e '/^<meta name=generator/d' \
        -e '/^%DVIPSSource:/d' > file2
  if cmp -s file1 file2; then
    :
  else
    cp $file $DOCSDIR/$file
    cp $file.gz $DOCSDIR/$file.gz
  fi
done

cd $DOCSDIR

# Finally, generate the installation documentation (but only for CVS HEAD).
if [ "$RELEASE" = "HEAD" ]; then
  SOURCEDIR=$WORKDIR/gcc/gcc/doc
  DESTDIR=$WWWBASE_PREFORMATTED/install
  export SOURCEDIR
  export DESTDIR
  $WORKDIR/gcc/gcc/doc/install.texi2html

  # Preprocess the entire web site, not just the install docs!
  echo "Invoking $WWWPREPROCESS"
  $WWWPREPROCESS |grep -v '^ Warning: Keeping'
fi

# Clean up behind us.

rm -rf $WORKDIR
update_web_docs_libstdcxx (deleted)
@@ -1,54 +0,0 @@
#!/bin/sh

# "sh update_web_docs_libstdcxx.sh"
# Checks out a copy of the libstdc++-v3 "inner" documentation and puts
# it in the onlinedocs area.  For an initial description of "inner"
# docs, see the thread starting with
# http://gcc.gnu.org/ml/libstdc++/2000-11/msg00475.html
#
# Id: update_v3_web_docs.sh,v 1.4 2000/12/25 05:02:14 pedwards Exp
#####################################################################

CVSROOT=/cvs/gcc
GETTHIS='gcc/libstdc++-v3/docs/html'
WWWDIR=/www/gcc/htdocs/onlinedocs/libstdc++
#WWWDIR=/tmp/fake-onlinedocs-testing

## No more changes should be needed.  Ha, right, whatever.
#####################################################################

PATH=/usr/local/bin:$PATH
export CVSROOT

test -d $WWWDIR || /bin/mkdir $WWWDIR
test -d $WWWDIR || { echo something is very wrong ; exit 1; }

WORKDIR=/tmp/v3-doc-update.$$
/bin/rm -rf $WORKDIR
/bin/mkdir $WORKDIR
cd $WORKDIR

# checkout all the HTML files, get down into an interesting directory
cvs -Q co $GETTHIS
cd $GETTHIS

# pity the cvs 'export' would require a tag... maybe gcc_latest_snapshot
# would be recent enough?  anyhow, get rid of the CVS dirs in the tree
find . -type d -name CVS -print | xargs rm -r Makefile

# build a compressed copy of the HTML, preserve directory structure
for file in `find . -name "*.html" -print`; do
  gzip --best < $file > $file.gz
done

# copy the tree to the onlinedocs area, preserve directory structure
#find . -depth -print | cpio -pdv $WWWDIR
find . -depth -print | cpio -pd $WWWDIR > /dev/null 2>&1

cd /
/bin/rm -rf $WORKDIR
update_web_docs_svn
@@ -3,6 +3,8 @@
# Generate HTML documentation from GCC Texinfo docs.
# This version is for GCC 3.1 and later versions.

set -e

# Run this from /tmp.
SVNROOT=${SVNROOT:-"file:///svn/gcc"}
export SVNROOT
@@ -78,43 +80,28 @@ fi

WORKDIR=/tmp/gcc-doc-update.$$

/bin/rm -rf $WORKDIR
/bin/mkdir $WORKDIR
rm -rf $WORKDIR
mkdir $WORKDIR
cd $WORKDIR

# Find all the texi files in the repository, except those in directories
# we do not care about (texinfo, etc).
if [ "$RELEASE" = "trunk" ]; then
  svn ls -R $SVNROOT/$RELEASE/gcc | grep "\.texi$" |fgrep -v -f/home/gccadmin/scripts/doc_exclude > FILES
  # SVN export doesn't function like CVS, in that it doesn't create an entire empty path structure
  # Thus, it's easiest to just export the entire tree
  svn -q export $SVNROOT/$RELEASE gcc
  # # Checkout all the texi files.
  # for i in `cat FILES`; do
  #   svn -q export $SVNROOT/$RELEASE/gcc/$i
  # done
  # svn -q export $SVNROOT/$RELEASE/gcc/doc/install.texi2html
  # svn -q export $SVNROOT/$RELEASE/gcc/doc/include/texinfo.tex
  # svn -q export $SVNROOT/$RELEASE/gcc/ada/xgnatugn.adb
  # svn -q export $SVNROOT/$RELEASE/gcc/ada/ug_words
  # svn -q export $SVNROOT/$RELEASE/gcc/BASE-VER
  # svn -q export $SVNROOT/$RELEASE/gcc/DEV-PHASE
else
  svn ls -R $SVNROOT/tags/$RELEASE/gcc | grep "\.texi$" |fgrep -v -f/home/gccadmin/scripts/doc_exclude > FILES
  # SVN export doesn't function like CVS, in that it doesn't create an entire empty path structure
  # Thus, it's easiest to just export the entire tree
  svn -q export $SVNROOT/tags/$RELEASE gcc
  # # Checkout all the texi files.
  # for i in `cat FILES`; do
  #   svn -q export $SVNROOT/tags/$RELEASE/gcc/$i
  # done
  # svn -q export $SVNROOT/tags/$RELEASE/gcc/doc/install.texi2html
  # svn -q export $SVNROOT/tags/$RELEASE/gcc/doc/include/texinfo.tex
  # svn -q export $SVNROOT/tags/$RELEASE/gcc/ada/xgnatugn.adb
  # svn -q export $SVNROOT/tags/$RELEASE/gcc/ada/ug_words
  # svn -q export $SVNROOT/tags/$RELEASE/gcc/BASE-VER
  # svn -q export $SVNROOT/tags/$RELEASE/gcc/DEV-PHASE
fi

# Remove all unwanted files.  This is needed (a) to build the Ada
# generator programs with the installed library, not the new one and
# (b) to avoid packaging all the sources instead of only documentation
# sources.
find gcc -type f \( -name '*.texi' \
  -o -path gcc/gcc/doc/install.texi2html \
  -o -path gcc/gcc/doc/include/texinfo.tex \
  -o -path gcc/gcc/ada/xgnatugn.adb \
  -o -path gcc/gcc/ada/ug_words \
  -o -path gcc/gcc/BASE-VER \
  -o -path gcc/gcc/DEV-PHASE \
  -o -print0 \) | xargs -0 rm -f

# Build a tarball of the sources.
tar cf docs-sources.tar gcc
@@ -166,15 +153,17 @@ today=`date +%d`
if test $today = 15; then
  find $DOCSDIR -type f -maxdepth 1 -print | grep -v index.html | xargs rm
  for m in $MANUALS; do
    rm $DOCSDIR/$m/*.html $DOCSDIR/$m/*.html.gz
    rm -f $DOCSDIR/$m/*.html $DOCSDIR/$m/*.html.gz
  done
fi

# And copy the resulting files to the web server
for file in */*.html *.ps *.pdf *.tar; do
  cat $DOCSDIR/$file |
    sed -e '/^<meta name=generator/d' \
        -e '/^%DVIPSSource:/d' > file1
  if [ -f $DOCSDIR/$file ]; then
    cat $DOCSDIR/$file |
      sed -e '/^<meta name=generator/d' \
          -e '/^%DVIPSSource:/d' > file1
  fi
  cat $file |
    sed -e '/^<meta name=generator/d' \
        -e '/^%DVIPSSource:/d' > file2
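In the pre-SVN script shown above, this loop goes on to compare file1 and file2 with cmp and copies a file to the server only when the stripped contents differ. A condensed sketch of that idiom (strip_volatile is a hypothetical helper, not part of either script):

    strip_volatile () {
      # Drop lines that change on every regeneration.
      sed -e '/^<meta name=generator/d' -e '/^%DVIPSSource:/d' "$1"
    }
    strip_volatile $DOCSDIR/$file > file1
    strip_volatile $file > file2
    cmp -s file1 file2 || { cp $file $DOCSDIR/$file; cp $file.gz $DOCSDIR/$file.gz; }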