<?php

abstract class PhabricatorRobotsController extends PhabricatorController {

  public function shouldRequireLogin() {
    return false;
  }

  public function processRequest() {
    $out = array();
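    // Each element of $out becomes one line of the robots.txt body; the
    // entries are joined with newlines just before the response is built.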
    $out[] = '# Forked from phabricator.wikimedia.org, we.phorge.it';
    // The version timestamp marks when I started editing these rules.
    $out[] = '# version: 20240421T042000+0900';
    $out[] = '# also at https://github.com/revi/sandbox.git';

    $out[] = 'User-Agent: *';
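    // Everything from here down to the next User-agent line applies to all
    // crawlers.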
    $out[] = 'Disallow: /diffusion/';
    $out[] = 'Disallow: /source/';
    $out[] = 'Disallow: /multimeter/';
    $out[] = 'Disallow: /file/';
    $out[] = 'Disallow: /policy/explain';
    $out[] = 'Disallow: /auth';
    $out[] = 'Disallow: /login';
    $out[] = 'Disallow: /maniphest/transaction';
    $out[] = 'Disallow: /tag';
    $out[] = 'Disallow: /search/query/all';
    $out[] = 'Disallow: /conduit';
    $out[] = 'Disallow: /api';
    $out[] = 'Disallow: /project';
    $out[] = 'Disallow: /applications';
    $out[] = 'Disallow: /token';
    $out[] = 'Disallow: /pholio';
    $out[] = 'Disallow: /dashboard';
    $out[] = 'Disallow: /calendar';
    $out[] = 'Disallow: /herald';
    // These are commit pages (r-prefixed URIs).
    $out[] = 'Disallow: /r*';
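    // Note that robots.txt rules are prefix matches, so /r* also blocks any
    // other top-level path that happens to start with "r".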
    // These are pastes (P$ URIs); the '$' is percent-encoded as %24.
    $out[] = 'Disallow: /P*%24*';

    $out[] = 'Disallow: /phame';
    // These are blog entries (J$ URIs), again with '$' encoded as %24.
    $out[] = 'Disallow: /J*%24*';

    // This is the user list.
    // As of 2024-04-17 the user list is behind auth, but that might change.
    $out[] = 'Disallow: /people';
    // These are user profile links.
    $out[] = 'Disallow: /p/';
    // Phorge-specific entries end here.
    $out[] = '# This is the Cloudflare endpoint';
    $out[] = '# Ref: https://developers.cloudflare.com/fundamentals/reference/cdn-cgi-endpoint/';
    $out[] = 'Disallow: /cdn-cgi/';

    $out[] = '# Google Ads are not welcome';
    $out[] = 'User-agent: Mediapartners-Google';
    $out[] = 'Disallow: /';
    $out[] = 'User-agent: AdsBot-Google';
    $out[] = 'Disallow: /';
    $out[] = 'User-agent: AdsBot-Google-Mobile';
    $out[] = 'Disallow: /';
    // While I sometimes (borderline rarely) use LLMs (GPT, Gemini, …), I'd
    // rather they not profit from my stuff.
    // Then again, my stuff is probably of little interest to them anyway.
    $out[] = '# ChatGPT Crawlers are not welcome';
    $out[] = '# Ref: https://platform.openai.com/docs/plugins/bot';
    $out[] = 'User-agent: ChatGPT-User';
    $out[] = 'Disallow: /';
    $out[] = 'User-agent: GPTBot';
    $out[] = 'Disallow: /';

    $out[] = '# Google Gemini AI Crawlers are also not welcome';
    $out[] = '# Ref: https://developers.google.com/search/docs/crawling-indexing/overview-google-crawlers?hl=en#google-extended';
    $out[] = 'User-agent: Google-Extended';
    $out[] = 'Disallow: /';

    $out[] = '# CCBot (ab)used to train LLMs';
    $out[] = '# Ref: https://darkvisitors.com/agents/ccbot';
    $out[] = 'User-agent: CCBot';
    $out[] = 'Disallow: /';

    $out[] = '# Facebook LLM Bot';
    $out[] = '# Ref: https://developers.facebook.com/docs/sharing/bot/';
    $out[] = 'User-agent: FacebookBot';
    $out[] = 'Disallow: /';
    $out[] = '# Diffbot, though this one is known to have an option to ignore robots.txt';
    $out[] = '# Ref: https://docs.diffbot.com/docs/why-is-my-crawl-not-crawling-and-other-uncommon-crawl-problems';
    $out[] = 'User-agent: Diffbot';
    $out[] = 'Disallow: /';

    $out[] = '# Bytespider';
    $out[] = '# Ref: https://darkvisitors.com/agents/bytespider';
    $out[] = 'User-agent: Bytespider';
    $out[] = 'Disallow: /';
    // Crawl-delay entries go at the bottom.
    // Ref: https://github.com/otwcode/otwarchive/pull/4411#discussion_r1044351129
    $out[] = 'User-agent: *';
    $out[] = 'Crawl-delay: 1';
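    // Crawl-delay is a non-standard directive: some crawlers (Bingbot, for
    // example) honor it, while Googlebot ignores it entirely.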
    $content = implode("\n", $out)."\n";
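    // The response is plain text, cacheable for two hours, and safe to serve
    // from a CDN.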
    return id(new AphrontPlainTextResponse())
      ->setContent($content)
      ->setCacheDurationInSeconds(phutil_units('2 hours in seconds'))
      ->setCanCDN(true);
  }
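  // For reference, the generated robots.txt begins like this (illustrative
  // excerpt of the entries built above):
  //
  //   # Forked from phabricator.wikimedia.org, we.phorge.it
  //   # version: 20240421T042000+0900
  //   # also at https://github.com/revi/sandbox.git
  //   User-Agent: *
  //   Disallow: /diffusion/
  //   Disallow: /source/
  //   ...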
}