From adfae287e9a147feb0eeb861cd3f2898c3861aae Mon Sep 17 00:00:00 2001
From: Sabelo Mhlambi
Date: Wed, 28 Sep 2022 22:37:52 -0500
Subject: [PATCH] update robots.txt to allow homepage to be crawled for google
 search results

---
 perma_web/perma/views/common.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/perma_web/perma/views/common.py b/perma_web/perma/views/common.py
index b516d53e5..e77c49f8c 100755
--- a/perma_web/perma/views/common.py
+++ b/perma_web/perma/views/common.py
@@ -498,7 +498,7 @@ def robots_txt(request):
     from ..urls import urlpatterns
 
     disallowed_prefixes = ['_', 'archive-', 'api_key', 'errors', 'log', 'manage', 'password', 'register', 'service', 'settings', 'sign-up']
-    allow = []
+    allow = ['/$']
     # some urlpatterns do not have names
     names = [urlpattern.name for urlpattern in urlpatterns if urlpattern.name is not None]
     for name in names:
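
For context on what the one-line change does: '/$' uses the wildcard syntax that
Googlebot supports in robots.txt, where '$' anchors the end of the URL, so
'Allow: /$' matches only the bare homepage rather than every path under '/'.
Below is a minimal, self-contained sketch of a Django view that serves such a
file. It is an illustration only, not Perma's actual implementation: the
Disallow entries and the plain-text rendering are assumptions, standing in for
the real view's urlpattern-driven logic and template.

from django.http import HttpResponse

def robots_txt(request):
    # '$' is a Googlebot extension that anchors the end of the URL,
    # so 'Allow: /$' matches only the bare homepage.
    allow = ['/$']
    # Illustrative subset only; the real view derives these from urlpatterns.
    disallow = ['/manage/', '/settings/', '/password/']
    lines = ['User-agent: *']
    lines += [f'Allow: {path}' for path in allow]
    lines += [f'Disallow: {path}' for path in disallow]
    return HttpResponse('\n'.join(lines) + '\n', content_type='text/plain')

Using '/$' instead of a plain 'Allow: /' keeps the rule narrow: the homepage
becomes eligible for indexing (e.g., in Google search results, per the commit
subject) while deeper paths still fall under their matching Disallow rules.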