added crawler restrictions as long as site is not officially published

Author: JKlingen
Date:   2015-03-17 22:20:07 +01:00
Commit: 92eca18a38
2 changed files with 8 additions and 1 deletion

head.html

@@ -1,5 +1,9 @@
 <head>
 <meta charset="utf-8">
+<!-- TODO remove before publication on getgreenshot.org, also remove robots.txt -->
+<meta name="robots" content="noindex, nofollow">
 <meta http-equiv="X-UA-Compatible" content="IE=edge,chrome=1">
 <title>{% if page.title %}{{ page.title }}{% else %}{{ site.title }}{% endif %}</title>
 <meta name="description" content="{{ site.description }}"/>
@@ -17,4 +21,4 @@
 <script src="{{ '/js/vendor/jquery-1.11.1.min.js' | prepend: site.baseurl }}"></script>
 <script src="{{ '/js/vendor/jquery.slides.min.js' | prepend: site.baseurl }}"></script>
 </head>

robots.txt (new file)

@@ -0,0 +1,3 @@
+# TODO remove before publication on getgreenshot.org, also meta header in head.html
+User-agent: *
+Disallow: /
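
The TODO comments imply that both the meta tag and robots.txt must be deleted by hand at launch. A config-gated variant would avoid the manual step; the following is only a sketch, assuming a hypothetical "published" flag in _config.yml that is not part of this commit:

    # _config.yml (staging value; flip to true at launch)
    published: false

    <!-- head.html: emit the crawler block only while unpublished -->
    {% unless site.published %}
    <meta name="robots" content="noindex, nofollow">
    {% endunless %}

Since Jekyll runs Liquid over any file that carries YAML front matter, robots.txt could be gated on the same flag instead of being removed manually.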