diff --git a/build.sh b/build.sh
index ffad0977f5..e25449ad84 100755
--- a/build.sh
+++ b/build.sh
@@ -8,7 +8,6 @@
 npm run build
 rsync -ra public/ "${TARGET}/instance/static"
 cp dist/index.html "${TARGET}/instance/static/index.html"
-cp robots.txt "${TARGET}/instance/static/robots.txt"
 rsync --delete -ra dist/static/ "${TARGET}/instance/static/static"
 rsync --delete -ra images/ "${TARGET}/instance/static/images"
 rsync --delete -ra sounds/ "${TARGET}/instance/static/sounds"
diff --git a/public/robots.txt b/public/robots.txt
index 58cb95a71f..4ca9ba0659 100644
--- a/public/robots.txt
+++ b/public/robots.txt
@@ -1,6 +1,4 @@
 # See http://www.robotstxt.org/robotstxt.html for documentation on how to use the robots.txt file
-#
-# To ban all spiders from the entire site uncomment the next two lines:
  User-agent: *
  Disallow: /web
  Disallow: /settings
@@ -11,4 +9,4 @@
  Disallow: /pages/DMCA.html
  Disallow: /oauth
  Disallow: /search
-
+ Disallow: /tag
diff --git a/robots.txt b/robots.txt
deleted file mode 100644
index 4ca9ba0659..0000000000
--- a/robots.txt
+++ /dev/null
@@ -1,12 +0,0 @@
-# See http://www.robotstxt.org/robotstxt.html for documentation on how to use the robots.txt file
- User-agent: *
- Disallow: /web
- Disallow: /settings
- Disallow: /login
- Disallow: /user-search
- Disallow: /registration
- Disallow: /cgi-bin
- Disallow: /pages/DMCA.html
- Disallow: /oauth
- Disallow: /search
- Disallow: /tag