enabled: Whether to run in "single user mode". Default false.
nickname: Nickname of the single user.
+robotstxt
+---------
+
+We serve a default robots.txt file to guide the behavior of Web
+crawlers. See http://www.robotstxt.org/ for more information on the
+format of this file.
+
+crawldelay: if non-empty, this value is output as the Crawl-delay:
+ value in the robots.txt file. See http://ur1.ca/l5a0
+ for more information. Default is zero, meaning no explicit delay.
+disallow: Array of (virtual) directories to disallow. Default is 'main',
+ 'search', 'message', 'settings', 'admin'. Ignored when site
+ is private, in which case the entire site ('/') is disallowed.
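+
+For example, to set a 10-second crawl delay and shorten the disallow
+list, you might add something like the following to your config.php
+(a sketch; the values are illustrative):
+
+    $config['robotstxt']['crawldelay'] = 10;
+    $config['robotstxt']['disallow'] = array('main', 'settings', 'admin');
+
+With those settings, /robots.txt would read:
+
+    User-Agent: *
+    Disallow: /main/
+    Disallow: /settings/
+    Disallow: /admin/
+    Crawl-delay: 10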
+
Plugins
=======
--- /dev/null
+<?php
+/**
+ * StatusNet - the distributed open-source microblogging tool
+ * Copyright (C) 2010, StatusNet, Inc.
+ *
+ * robots.txt generator
+ *
+ * PHP version 5
+ *
+ * @category Action
+ * @package StatusNet
+ * @author Evan Prodromou <evan@status.net>
+ * @license http://www.fsf.org/licensing/licenses/agpl.html AGPLv3
+ * @link http://status.net/
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Affero General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+if (!defined('STATUSNET')) {
+ exit(1);
+}
+
+/**
+ * Prints out a static robots.txt
+ *
+ * @category Action
+ * @package StatusNet
+ * @author Evan Prodromou <evan@status.net>
+ * @license http://www.fsf.org/licensing/licenses/agpl.html AGPLv3
+ * @link http://status.net/
+ */
+
+class RobotstxtAction extends Action
+{
+ /**
+ * Handles requests
+ *
+ * Since this is a relatively static document, we
+ * don't do a prepare()
+ *
+ * @param array $args GET, POST, and URL params; unused.
+ *
+ * @return void
+ */
+
+ function handle($args)
+ {
+ if (Event::handle('StartRobotsTxt', array($this))) {
+
+ header('Content-Type: text/plain');
+
+ print "User-Agent: *\n";
+
+ if (common_config('site', 'private')) {
+
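+ // Private site: exclude the entire site from crawling.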
+ print "Disallow: /\n";
+
+ } else {
+
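+ // Public site: exclude only the configured directories.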
+ $disallow = common_config('robotstxt', 'disallow');
+
+ foreach ($disallow as $dir) {
+ print "Disallow: /$dir/\n";
+ }
+
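+ // A non-zero crawl delay asks well-behaved crawlers to pause
+ // between requests.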
+ $crawldelay = common_config('robotstxt', 'crawldelay');
+
+ if (!empty($crawldelay)) {
+ print "Crawl-delay: " . $crawldelay . "\n";
+ }
+ }
+
+ Event::handle('EndRobotsTxt', array($this));
+ }
+ }
+
+ /**
+ * Return true; this page doesn't touch the DB.
+ *
+ * @param array $args other arguments
+ *
+ * @return boolean is read only action?
+ */
+
+ function isReadOnly($args)
+ {
+ return true;
+ }
+}
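Because handle() brackets its output with the StartRobotsTxt and
EndRobotsTxt events, a plugin can extend or replace the generated
file; returning false from a StartRobotsTxt handler suppresses the
default rules. A minimal sketch, assuming the usual StatusNet
event-to-plugin-method mapping (the plugin name and the extra path
are hypothetical):

    class ExtraRobotsPlugin extends Plugin
    {
        function onEndRobotsTxt($action)
        {
            // Append an extra rule after the default output;
            // returning true lets other handlers run as well.
            print "Disallow: /extras/\n";
            return true;
        }
    }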
if (!$user && common_config('site', 'private')
&& !isLoginAction($action)
&& !preg_match('/rss$/', $action)
- && !preg_match('/^Api/', $action)
- ) {
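+ // Keep robots.txt reachable on private sites so crawlers can
+ // read the "Disallow: /" rule.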
+ && $action != 'robotstxt'
+ && !preg_match('/^Api/', $action)) {
+
// set returnto
$rargs =& common_copy_args($args);
unset($rargs['action']);