Skip to content

Commit

Permalink
Add option to allow all URLs to be crawlable via robots.txt
Browse files — browse the repository at this point in the history
  • Loading branch information
acelaya committed Apr 21, 2024
1 parent a89b53a commit b9b6ec2
Show file tree
Hide file tree
Showing 4 changed files with 22 additions and 3 deletions.
13 changes: 13 additions & 0 deletions config/autoload/robots.global.php
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
<?php

declare(strict_types=1);

namespace Shlinkio\Shlink\Core;

use Shlinkio\Shlink\Core\Config\EnvVars;

// Global config for the robots.txt endpoint.
// When 'allow-all' is true, all short URLs are advertised as crawlable,
// instead of only those explicitly flagged as such.
return [

    'robots' => [
        // Read from the ROBOTS_ALLOW_ALL env var; defaults to false when unset.
        'allow-all' => (bool) EnvVars::ROBOTS_ALLOW_ALL->loadFromEnv(false),
    ],

];
2 changes: 1 addition & 1 deletion module/Core/config/dependencies.config.php
Original file line number Diff line number Diff line change
Expand Up @@ -189,7 +189,7 @@
'Logger_Shlink',
Options\QrCodeOptions::class,
],
Action\RobotsAction::class => [Crawling\CrawlingHelper::class],
Action\RobotsAction::class => [Crawling\CrawlingHelper::class, 'config.robots.allow-all'],

ShortUrl\Resolver\PersistenceShortUrlRelationResolver::class => [
'em',
Expand Down
9 changes: 7 additions & 2 deletions module/Core/src/Action/RobotsAction.php
Original file line number Diff line number Diff line change
Expand Up @@ -15,9 +15,9 @@

use const PHP_EOL;

class RobotsAction implements RequestHandlerInterface, StatusCodeInterface
readonly class RobotsAction implements RequestHandlerInterface, StatusCodeInterface
{
public function __construct(private readonly CrawlingHelperInterface $crawlingHelper)
public function __construct(private CrawlingHelperInterface $crawlingHelper, private bool $allowAll = false)
{
}

Expand All @@ -37,6 +37,11 @@ private function buildRobots(): iterable
ROBOTS;

if ($this->allowAll) {
yield 'Disallow: /rest';
return;

Check warning on line 42 in module/Core/src/Action/RobotsAction.php

View check run for this annotation

Codecov / codecov/patch

module/Core/src/Action/RobotsAction.php#L41-L42

Added lines #L41 - L42 were not covered by tests
}

$shortCodes = $this->crawlingHelper->listCrawlableShortCodes();
foreach ($shortCodes as $shortCode) {
yield sprintf('Allow: /%s%s', $shortCode, PHP_EOL);
Expand Down
1 change: 1 addition & 0 deletions module/Core/src/Config/EnvVars.php
Original file line number Diff line number Diff line change
Expand Up @@ -72,6 +72,7 @@ enum EnvVars: string
case TIMEZONE = 'TIMEZONE';
case MULTI_SEGMENT_SLUGS_ENABLED = 'MULTI_SEGMENT_SLUGS_ENABLED';
case MEMORY_LIMIT = 'MEMORY_LIMIT';
case ROBOTS_ALLOW_ALL = 'ROBOTS_ALLOW_ALL';

public function loadFromEnv(mixed $default = null): mixed
{
Expand Down

0 comments on commit b9b6ec2

Please sign in to comment.