 * Description: Blocking bots based on detecting bots/crawlers/spiders via the user agent and http_from header.
 * Author: Philipp Holzer <admin@philipp.info>
 */
11 use Friendica\Core\Hook;
12 use Friendica\Core\System;
13 use Jaybizzle\CrawlerDetect\CrawlerDetect;
14 use Friendica\Core\Logger;
16 require_once __DIR__ . DIRECTORY_SEPARATOR . 'vendor' . DIRECTORY_SEPARATOR . 'autoload.php';
/**
 * Addon install hook.
 *
 * Registers this addon's 'init_1' handler so blockbot_init_1() is invoked
 * on every request initialisation.
 */
function blockbot_install()
{
	Hook::register('init_1', __FILE__, 'blockbot_init_1');
}
/**
 * Addon uninstall hook.
 *
 * Removes the 'init_1' registration added by blockbot_install(), so the
 * bot check no longer runs once the addon is disabled.
 */
function blockbot_uninstall()
{
	Hook::unregister('init_1', __FILE__, 'blockbot_init_1');
}
/**
 * 'init_1' hook handler: rejects requests from detected crawlers/bots.
 *
 * CrawlerDetect classifies the request's User-Agent header. Because it is
 * known to misclassify several legitimate fediverse/federation agents, a
 * whitelist of known-good agent substrings is checked first; matches are
 * logged as false positives and allowed through. Everything else that
 * CrawlerDetect flags is answered with HTTP 403 and the request ends.
 *
 * Fixes vs. previous revision:
 * - Logger calls now use the imported `Logger` class name (was lowercase
 *   `logger::`, which only worked via PHP's case-insensitive class lookup).
 * - The User-Agent header is read once with a `?? ''` fallback, avoiding an
 *   undefined-index notice when no User-Agent header is sent — typical for
 *   exactly the kind of client this addon blocks.
 *
 * @param App $a Friendica application instance (hook signature; not used directly)
 *
 * @return void Either returns normally or terminates via System::httpExit().
 */
function blockbot_init_1(App $a)
{
	$crawlerDetect = new CrawlerDetect();

	// Read the header once; missing User-Agent becomes the empty string.
	$userAgent = $_SERVER['HTTP_USER_AGENT'] ?? '';

	// List of strings of known "good" agents
	$agents = ['diaspora-connection-tester', 'DiasporaFederation', 'Friendica', '(compatible; zot)',
		'Micro.blog', 'Mastodon', 'hackney', 'GangGo', 'python/federation', 'GNU social', 'winHttp',
		'Go-http-client', 'Mr.4x3 Powered', 'Test Certificate Info', 'WordPress.com', 'zgrab',
		'curl/', 'StatusNet', 'OpenGraphReader/', 'Uptimebot/', 'python-opengraph-jaywink'];

	if ($crawlerDetect->isCrawler()) {
		foreach ($agents as $agent) {
			// stristr(): case-insensitive substring match against the whitelist.
			if (stristr($userAgent, $agent)) {
				// @ToDo: Report every false positive here: https://github.com/JayBizzle/Crawler-Detect/issues/326
				Logger::notice('False positive', ['agent' => $userAgent]);
				return;
			}
		}
		// Flagged as a crawler and not whitelisted: block and stop the request.
		Logger::info('Blocked bot', ['agent' => $userAgent]);
		System::httpExit(403, 'Bots are not allowed');
	}
	Logger::debug('Good user agent detected', ['agent' => $userAgent]);
}