Allow page-specific robots meta directive

jklingen 2015-04-03 18:30:36 +02:00
commit 8277d32c0b
3 changed files with 6 additions and 4 deletions

_config.yml

@@ -11,6 +11,9 @@ facebook: Greenshot.Tool
googleplus: 108405649004966849465
openhub_project: greenshot
+# TODO change to index,follow before going live, also remove robots.txt
+metarobots: noindex, nofollow
collections:
faqs:
output: true

head.html

@@ -1,8 +1,7 @@
<head>
<meta charset="utf-8">
-<!-- TODO remove before publication on getgreenshot.org, also remove robots.txt -->
-<meta name="robots" content="noindex, nofollow">
+<meta name="robots" content="{% if page.metarobots %}{{page.metarobots}}{% else %}{{ site.metarobots }}{% endif %}">
<meta http-equiv="X-UA-Compatible" content="IE=edge,chrome=1">
<title>{% if page.metatitle %}{{ page.metatitle }}{% else if page.title %}{{ page.title }}{% else %}{{ site.title }}{% endif %}</title>

robots.txt

@@ -1,3 +1,3 @@
-# TODO remove before publication on getgreenshot.org, also meta header in head.html
+# TODO remove before publication on getgreenshot.org, also change meta directive in _config.yml
User-agent: *
Disallow: /
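
With the new Liquid conditional in head.html, an individual page can override the site-wide metarobots default from _config.yml. A minimal sketch of how a page could use this, assuming a hypothetical downloads.md page (the page name and values are illustrative; only the metarobots front-matter key is what this commit actually reads):

---
layout: page
title: Downloads
# page-specific override of site.metarobots (hypothetical example)
metarobots: index, follow
---

For this page the head renders <meta name="robots" content="index, follow">; any page without a metarobots key falls back to site.metarobots, which is currently noindex, nofollow until the site goes live.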